diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md index 5ea58046f49..53e964188f6 100644 --- a/.github/ISSUE_TEMPLATE.md +++ b/.github/ISSUE_TEMPLATE.md @@ -3,7 +3,11 @@ GitHub is reserved for bug reports and feature requests. The best place to ask a general question is at the Elastic Discourse forums at https://discuss.elastic.co. If you are in fact posting a bug report or a feature request, please include one and only one of the below blocks -in your new issue. +in your new issue. Note that whether you're filing a bug report or a +feature request, ensure that your submission is for an +[OS that we support](https://www.elastic.co/support/matrix#show_os). +Bug reports on an OS that we do not support or feature requests +specific to an OS that we do not support will be closed. --> diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java logger.info("--> waiting for 3 nodes to be up"); - assertBusy(new Runnable() { - @Override - public void run() { - NodesStatsResponse resp = client().admin().cluster().prepareNodesStats().get(); - assertThat(resp.getNodes().size(), equalTo(3)); - } + assertBusy(() -> { + NodesStatsResponse resp = client().admin().cluster().prepareNodesStats().get(); + assertThat(resp.getNodes().size(), equalTo(3)); }); logger.info("--> creating 'test' index"); - prepareCreate("test").setSettings(Settings.builder() + assertAcked(prepareCreate("test").setSettings(Settings.builder() .put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "1m") .put(IndexMetaData.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 5) - .put(IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 1)).get(); - ensureGreen("test"); + .put(IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 1)) + .setWaitForActiveShards(ActiveShardCount.ALL).get()); logger.info("--> stopping a random node"); assertTrue(internalCluster().stopRandomDataNode()); @@ -92,6 +90,7 @@ public final class ClusterAllocationExplainIT extends ESIntegTestCase { .setSettings(Settings.builder() .put("index.number_of_shards", 5) .put("index.number_of_replicas", 1)) + .setWaitForActiveShards(ActiveShardCount.ALL) // wait on all shards .get(); client().admin().indices().prepareCreate("only-baz") @@ -99,6 +98,7 @@ public final class ClusterAllocationExplainIT extends ESIntegTestCase { .put("index.routing.allocation.include.bar", "baz") .put("index.number_of_shards", 5) .put("index.number_of_replicas", 1)) + .setWaitForActiveShards(ActiveShardCount.ALL) .get(); client().admin().indices().prepareCreate("only-foo") @@ -108,9 +108,6 @@ public final class ClusterAllocationExplainIT extends ESIntegTestCase { .put("index.number_of_replicas", 1)) .get(); - ensureGreen("anywhere", "only-baz"); - ensureYellow("only-foo"); - ClusterAllocationExplainResponse resp = client().admin().cluster().prepareAllocationExplain() .setIndex("only-foo") .setShard(0) @@ -126,7 +123,6 @@ public final class ClusterAllocationExplainIT extends ESIntegTestCase { Map<DiscoveryNode, NodeExplanation> explanations = cae.getNodeExplanations(); - Float noAttrWeight = -1f; Float barAttrWeight = -1f; Float fooBarAttrWeight = -1f; for (Map.Entry<DiscoveryNode, NodeExplanation> entry : explanations.entrySet()) { @@ -134,7 +130,6 @@ public final class ClusterAllocationExplainIT extends ESIntegTestCase { String nodeName = node.getName(); NodeExplanation explanation = entry.getValue(); ClusterAllocationExplanation.FinalDecision finalDecision = explanation.getFinalDecision(); - String finalExplanation = explanation.getFinalExplanation(); ClusterAllocationExplanation.StoreCopy storeCopy = explanation.getStoreCopy(); Decision d = explanation.getDecision(); float weight =
explanation.getWeight(); @@ -143,7 +138,6 @@ public final class ClusterAllocationExplainIT extends ESIntegTestCase { assertEquals(d.type(), Decision.Type.NO); if (noAttrNode.equals(nodeName)) { assertThat(d.toString(), containsString("node does not match index include filters [foo:\"bar\"]")); - noAttrWeight = weight; assertNull(storeStatus); assertEquals("the shard cannot be assigned because one or more allocation decider returns a 'NO' decision", explanation.getFinalExplanation()); diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainRequestTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainRequestTests.java new file mode 100644 index 00000000000..926de8b253e --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainRequestTests.java @@ -0,0 +1,44 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.cluster.allocation; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.test.ESTestCase; + +public class ClusterAllocationExplainRequestTests extends ESTestCase { + + public void testSerialization() throws Exception { + ClusterAllocationExplainRequest request = + new ClusterAllocationExplainRequest(randomAsciiOfLength(4), randomIntBetween(0, Integer.MAX_VALUE), randomBoolean()); + request.includeYesDecisions(randomBoolean()); + request.includeDiskInfo(randomBoolean()); + BytesStreamOutput output = new BytesStreamOutput(); + request.writeTo(output); + + ClusterAllocationExplainRequest actual = new ClusterAllocationExplainRequest(); + actual.readFrom(output.bytes().streamInput()); + assertEquals(request.getIndex(), actual.getIndex()); + assertEquals(request.getShard(), actual.getShard()); + assertEquals(request.isPrimary(), actual.isPrimary()); + assertEquals(request.includeYesDecisions(), actual.includeYesDecisions()); + assertEquals(request.includeDiskInfo(), actual.includeDiskInfo()); + } + +} diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainTests.java index d5cefc6d1f3..895450e6d5b 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainTests.java @@ -32,7 +32,6 @@ public final class ClusterAllocationExplainTests extends ESSingleNodeTestCase { public void testShardExplain() throws Exception { client().admin().indices().prepareCreate("test") .setSettings("index.number_of_shards", 1, 
"index.number_of_replicas", 1).get(); - client().admin().cluster().health(Requests.clusterHealthRequest("test").waitForYellowStatus()).get(); ClusterAllocationExplainResponse resp = client().admin().cluster().prepareAllocationExplain() .setIndex("test").setShard(0).setPrimary(false).get(); @@ -47,7 +46,6 @@ public final class ClusterAllocationExplainTests extends ESSingleNodeTestCase { NodeExplanation explanation = cae.getNodeExplanations().values().iterator().next(); ClusterAllocationExplanation.FinalDecision fd = explanation.getFinalDecision(); ClusterAllocationExplanation.StoreCopy storeCopy = explanation.getStoreCopy(); - String finalExplanation = explanation.getFinalExplanation(); Decision d = explanation.getDecision(); assertNotNull("should have a decision", d); assertEquals(Decision.Type.NO, d.type()); @@ -76,7 +74,6 @@ public final class ClusterAllocationExplainTests extends ESSingleNodeTestCase { d = explanation.getDecision(); fd = explanation.getFinalDecision(); storeCopy = explanation.getStoreCopy(); - finalExplanation = explanation.getFinalExplanation(); assertNotNull("should have a decision", d); assertEquals(Decision.Type.NO, d.type()); assertEquals(ClusterAllocationExplanation.FinalDecision.ALREADY_ASSIGNED, fd); diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplanationTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplanationTests.java index d0e8ef14d01..425edeb1065 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplanationTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplanationTests.java @@ -31,7 +31,7 @@ import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -41,7 +41,6 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; import java.io.IOException; -import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.Map; @@ -66,7 +65,7 @@ public final class ClusterAllocationExplanationTests extends ESTestCase { .numberOfShards(1) .numberOfReplicas(1) .build(); - private DiscoveryNode node = new DiscoveryNode("node-0", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + private DiscoveryNode node = new DiscoveryNode("node-0", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); private static Decision.Multi yesDecision = new Decision.Multi(); private static Decision.Multi noDecision = new Decision.Multi(); @@ -202,10 +201,10 @@ public final class ClusterAllocationExplanationTests extends ESTestCase { yesDecision, nodeWeight, storeStatus, "", activeAllocationIds, false); nodeExplanations.put(ne.getNode(), ne); ClusterAllocationExplanation cae = new ClusterAllocationExplanation(shard, true, - "assignedNode", allocationDelay, remainingDelay, null, false, nodeExplanations); + "assignedNode", allocationDelay, remainingDelay, null, false, nodeExplanations, null); BytesStreamOutput out = 
new BytesStreamOutput(); cae.writeTo(out); - StreamInput in = StreamInput.wrap(out.bytes()); + StreamInput in = out.bytes().streamInput(); ClusterAllocationExplanation cae2 = new ClusterAllocationExplanation(in); assertEquals(shard, cae2.getShard()); assertTrue(cae2.isPrimary()); @@ -215,9 +214,7 @@ assertEquals(allocationDelay, cae2.getAllocationDelayMillis()); assertEquals(remainingDelay, cae2.getRemainingDelayMillis()); for (Map.Entry<DiscoveryNode, NodeExplanation> entry : cae2.getNodeExplanations().entrySet()) { - DiscoveryNode node = entry.getKey(); NodeExplanation explanation = entry.getValue(); - IndicesShardStoresResponse.StoreStatus status = explanation.getStoreStatus(); assertNotNull(explanation.getStoreStatus()); assertNotNull(explanation.getDecision()); assertEquals(nodeWeight, explanation.getWeight()); @@ -240,7 +237,7 @@ Map<DiscoveryNode, NodeExplanation> nodeExplanations = new HashMap<>(1); nodeExplanations.put(ne.getNode(), ne); ClusterAllocationExplanation cae = new ClusterAllocationExplanation(shardId, true, - "assignedNode", 42, 42, null, false, nodeExplanations); + "assignedNode", 42, 42, null, false, nodeExplanations, null); XContentBuilder builder = XContentFactory.jsonBuilder(); cae.toXContent(builder, ToXContent.EMPTY_PARAMS); assertEquals("{\"shard\":{\"index\":\"foo\",\"index_uuid\":\"uuid\",\"id\":0,\"primary\":true},\"assigned\":true," + diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponsesTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponsesTests.java index 704c1348b7e..d0d452df478 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponsesTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthResponsesTests.java @@ -84,7 +84,7 @@ public class ClusterHealthResponsesTests extends ESTestCase { if (randomBoolean()) { BytesStreamOutput out = new BytesStreamOutput(); clusterHealth.writeTo(out); - StreamInput in = StreamInput.wrap(out.bytes()); + StreamInput in = out.bytes().streamInput(); clusterHealth = ClusterHealthResponse.readResponseFrom(in); } return clusterHealth; diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java index 594028f4e6f..9027b3d372e 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java @@ -230,7 +230,7 @@ public class CancellableTasksTests extends TaskManagerTestCase { } @Override - public void onFailure(Throwable e) { + public void onFailure(Exception e) { throwableReference.set(e); responseLatch.countDown(); } @@ -308,7 +308,7 @@ public class CancellableTasksTests extends TaskManagerTestCase { } @Override - public void onFailure(Throwable e) { + public void onFailure(Exception e) { throwableReference.set(e); responseLatch.countDown(); } diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java index 6f8e3fda156..d479af2b1be 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java +++
b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java @@ -98,6 +98,11 @@ public class TasksIT extends ESIntegTestCase { private Map<Tuple<String, String>, RecordingTaskManagerListener> listeners = new HashMap<>(); + @Override + protected boolean addMockTransportService() { + return false; + } + @Override protected Collection<Class<? extends Plugin>> nodePlugins() { return pluginList(MockTransportService.TestPlugin.class, TestTaskPlugin.class); } @@ -738,12 +743,12 @@ public class TasksIT extends ESIntegTestCase { } @Override - public void onFailure(Throwable e) { + public void onFailure(Exception e) { throw new RuntimeException(e); } }); b.await(); - + // Now we can find it! GetTaskResponse response = expectFinishedTask(new TaskId("fake:1")); assertEquals("test", response.getTask().getTask().getAction()); diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java index c4d49d899b9..2c78786ab04 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java @@ -249,7 +249,7 @@ public class TransportTasksActionTests extends TaskManagerTestCase { /** * Test class for testing task operations */ - static abstract class TestTasksAction extends TransportTasksAction<Task, TestTasksRequest, TestTasksResponse, TestTaskResponse> { + abstract static class TestTasksAction extends TransportTasksAction<Task, TestTasksRequest, TestTasksResponse, TestTaskResponse> { protected TestTasksAction(Settings settings, String actionName, ThreadPool threadPool, ClusterService clusterService, TransportService transportService) { @@ -338,7 +338,7 @@ public class TransportTasksActionTests extends TaskManagerTestCase { } @Override - public void onFailure(Throwable e) { + public void onFailure(Exception e) { logger.warn("Couldn't get list of tasks", e); responseLatch.countDown(); } @@ -526,7 +526,7 @@ public class TransportTasksActionTests extends TaskManagerTestCase { } @Override - public void onFailure(Throwable e) { + public void onFailure(Exception e) { responseLatch.countDown(); } }); diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java index b736751b781..4f553dfb88a 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteRequestTests.java @@ -166,7 +166,7 @@ public class ClusterRerouteRequestTests extends ESTestCase { private ClusterRerouteRequest roundTripThroughBytes(ClusterRerouteRequest original) throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { original.writeTo(output); - try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { + try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) { ClusterRerouteRequest copy = new ClusterRerouteRequest(); copy.readFrom(in); return copy; diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteTests.java index 00fcbf60a5a..657fec558b8 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteTests.java +++
b/core/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteTests.java @@ -66,7 +66,7 @@ public class ClusterRerouteTests extends ESAllocationTestCase { BytesReference bytes = out.bytes(); NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(); new NetworkModule(null, Settings.EMPTY, true, namedWriteableRegistry); - StreamInput wrap = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes.toBytes()), + StreamInput wrap = new NamedWriteableAwareStreamInput(bytes.streamInput(), namedWriteableRegistry); ClusterRerouteRequest deserializedReq = new ClusterRerouteRequest(); deserializedReq.readFrom(wrap); @@ -94,7 +94,7 @@ public class ClusterRerouteTests extends ESAllocationTestCase { } @Override - public void onFailure(Throwable e) { + public void onFailure(Exception e) { } }; diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequestTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequestTests.java index fc04de81254..b515829b72a 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/state/ClusterStateRequestTests.java @@ -45,7 +45,7 @@ public class ClusterStateRequestTests extends ESTestCase { output.setVersion(testVersion); clusterStateRequest.writeTo(output); - StreamInput streamInput = StreamInput.wrap(output.bytes()); + StreamInput streamInput = output.bytes().streamInput(); streamInput.setVersion(testVersion); ClusterStateRequest deserializedCSRequest = new ClusterStateRequest(); deserializedCSRequest.readFrom(streamInput); diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java index 0a63dd46095..f2ed690bb9c 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java @@ -120,7 +120,7 @@ public class ClusterStatsIT extends ESIntegTestCase { assertThat(response.getStatus(), Matchers.equalTo(ClusterHealthStatus.GREEN)); prepareCreate("test1").setSettings("number_of_shards", 2, "number_of_replicas", 1).get(); - ensureYellow(); + response = client().admin().cluster().prepareClusterStats().get(); assertThat(response.getStatus(), Matchers.equalTo(ClusterHealthStatus.YELLOW)); assertThat(response.indicesStats.getDocs().getCount(), Matchers.equalTo(0L)); @@ -161,12 +161,7 @@ public class ClusterStatsIT extends ESIntegTestCase { public void testValuesSmokeScreen() throws IOException, ExecutionException, InterruptedException { internalCluster().startNodesAsync(randomIntBetween(1, 3)).get(); index("test1", "type", "1", "f", "f"); - /* - * Ensure at least one shard is allocated otherwise the FS stats might - * return 0. This happens if the File#getTotalSpace() and friends is called - * on a directory that doesn't exist or has not yet been created. 
- */ - ensureYellow("test1"); + ClusterStatsResponse response = client().admin().cluster().prepareClusterStats().get(); String msg = response.toString(); assertThat(msg, response.getTimestamp(), Matchers.greaterThan(946681200000L)); // 1 Jan 2000 diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptRequestTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptRequestTests.java index e4c2849b907..2e9239a2c3b 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptRequestTests.java @@ -37,7 +37,7 @@ public class GetStoredScriptRequestTests extends ESTestCase { out.setVersion(randomVersion(random())); request.writeTo(out); - StreamInput in = StreamInput.wrap(out.bytes()); + StreamInput in = out.bytes().streamInput(); in.setVersion(out.getVersion()); GetStoredScriptRequest request2 = new GetStoredScriptRequest(); request2.readFrom(in); diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java index 5e2c503eba1..24edbf47a2d 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java @@ -73,7 +73,8 @@ public class TransportAnalyzeActionTests extends ESTestCase { request.analyzer(null); request.tokenizer("whitespace"); - request.tokenFilters("lowercase", "word_delimiter"); + request.addTokenFilter("lowercase"); + request.addTokenFilter("word_delimiter"); request.text("the qu1ck brown fox"); analyze = TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, randomBoolean() ? analysisService : null, registry, environment); tokens = analyze.getTokens(); @@ -84,8 +85,9 @@ public class TransportAnalyzeActionTests extends ESTestCase { request.analyzer(null); request.tokenizer("whitespace"); - request.charFilters("html_strip"); - request.tokenFilters("lowercase", "word_delimiter"); + request.addCharFilter("html_strip"); + request.addTokenFilter("lowercase"); + request.addTokenFilter("word_delimiter"); request.text("
<p>the qu1ck brown fox</p>
"); analyze = TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, randomBoolean() ? analysisService : null, registry, environment); tokens = analyze.getTokens(); @@ -155,7 +157,8 @@ public class TransportAnalyzeActionTests extends ESTestCase { request.analyzer(null); request.tokenizer("whitespace"); - request.tokenFilters("lowercase", "wordDelimiter"); + request.addTokenFilter("lowercase"); + request.addTokenFilter("wordDelimiter"); request.text("the qu1ck brown fox-dog"); analyze = TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, analysisService, registry, environment); tokens = analyze.getTokens(); @@ -211,7 +214,7 @@ public class TransportAnalyzeActionTests extends ESTestCase { try { AnalyzeRequest request = new AnalyzeRequest(); request.tokenizer("whitespace"); - request.tokenFilters("foobar"); + request.addTokenFilter("foobar"); request.text("the qu1ck brown fox"); TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, notGlobal ? analysisService : null, registry, environment); fail("no such analyzer"); @@ -226,8 +229,8 @@ public class TransportAnalyzeActionTests extends ESTestCase { try { AnalyzeRequest request = new AnalyzeRequest(); request.tokenizer("whitespace"); - request.tokenFilters("lowercase"); - request.charFilters("foobar"); + request.addTokenFilter("lowercase"); + request.addCharFilter("foobar"); request.text("the qu1ck brown fox"); TransportAnalyzeAction.analyze(request, AllFieldMapper.NAME, null, notGlobal ? analysisService : null, registry, environment); fail("no such analyzer"); diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java index 3e7323dceeb..7231bee0bef 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java @@ -25,6 +25,7 @@ import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.ClusterInfoService; import org.elasticsearch.cluster.ClusterState; @@ -41,14 +42,11 @@ import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.index.query.TermsQueryBuilder; -import org.elasticsearch.node.service.NodeService; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; -import org.junit.Ignore; import java.util.HashMap; -import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicInteger; @@ -232,7 +230,7 @@ public class CreateIndexIT extends ESIntegTestCase { } @Override - public void onFailure(Throwable e) { + public void onFailure(Exception e) { throw new RuntimeException(e); } } @@ -292,7 +290,7 @@ public class CreateIndexIT extends ESIntegTestCase { public void testRestartIndexCreationAfterFullClusterRestart() throws Exception { 
client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder().put("cluster.routing.allocation.enable", "none")).get(); - client().admin().indices().prepareCreate("test").setSettings(indexSettings()).get(); + client().admin().indices().prepareCreate("test").setWaitForActiveShards(ActiveShardCount.NONE).setSettings(indexSettings()).get(); internalCluster().fullRestart(); ensureGreen("test"); } diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushUnitTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushUnitTests.java index 04f6037f64b..7040c92ec1d 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushUnitTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushUnitTests.java @@ -84,7 +84,7 @@ public class SyncedFlushUnitTests extends ESTestCase { assertThat(testPlan.result.restStatus(), equalTo(testPlan.totalCounts.failed > 0 ? RestStatus.CONFLICT : RestStatus.OK)); BytesStreamOutput out = new BytesStreamOutput(); testPlan.result.writeTo(out); - StreamInput in = StreamInput.wrap(out.bytes()); + StreamInput in = out.bytes().streamInput(); SyncedFlushResponse readResponse = new SyncedFlushResponse(); readResponse.readFrom(in); assertThat(readResponse.totalShards(), equalTo(testPlan.totalCounts.total)); diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java index 7d163630afb..8a4a62f9728 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverActionTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.action.admin.indices.rollover; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesClusterStateUpdateRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexClusterStateUpdateRequest; +import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.cluster.metadata.AliasAction; import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -166,6 +167,8 @@ public class TransportRolloverActionTests extends ESTestCase { String alias = randomAsciiOfLength(10); String rolloverIndex = randomAsciiOfLength(10); final RolloverRequest rolloverRequest = new RolloverRequest(alias, randomAsciiOfLength(10)); + final ActiveShardCount activeShardCount = randomBoolean() ? 
ActiveShardCount.ALL : ActiveShardCount.ONE; + rolloverRequest.setWaitForActiveShards(activeShardCount); final Settings settings = Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()) diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java b/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java index 6e3e5d76224..1cd1704e164 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java @@ -215,7 +215,7 @@ public class IndicesShardStoreRequestIT extends ESIntegTestCase { client().admin().indices().prepareFlush().setForce(true).setWaitIfOngoing(true).execute().actionGet(); } - private final static class IndexNodePredicate implements Predicate<Settings> { + private static final class IndexNodePredicate implements Predicate<Settings> { private final Set<String> nodesWithShard; public IndexNodePredicate(String index) { diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreResponseTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreResponseTests.java index 3c12d7d9b10..9705009a044 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreResponseTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreResponseTests.java @@ -26,7 +26,7 @@ import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.ImmutableOpenIntMap; import org.elasticsearch.common.collect.ImmutableOpenMap; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -53,8 +53,8 @@ public class IndicesShardStoreResponseTests extends ESTestCase { List<IndicesShardStoresResponse.Failure> failures = new ArrayList<>(); ImmutableOpenIntMap.Builder<List<IndicesShardStoresResponse.StoreStatus>> storeStatuses = ImmutableOpenIntMap.builder(); - DiscoveryNode node1 = new DiscoveryNode("node1", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); - DiscoveryNode node2 = new DiscoveryNode("node2", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + DiscoveryNode node1 = new DiscoveryNode("node1", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); + DiscoveryNode node2 = new DiscoveryNode("node2", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); List<IndicesShardStoresResponse.StoreStatus> storeStatusList = new ArrayList<>(); storeStatusList.add(new IndicesShardStoresResponse.StoreStatus(node1, 3, null, IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY, null)); storeStatusList.add(new IndicesShardStoresResponse.StoreStatus(node2, ShardStateMetaData.NO_VERSION, UUIDs.randomBase64UUID(), IndicesShardStoresResponse.StoreStatus.AllocationStatus.REPLICA, null)); @@ -122,7 +122,7 @@ } public void testStoreStatusOrdering() throws Exception { - DiscoveryNode node1 = new DiscoveryNode("node1", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + DiscoveryNode node1 = new
DiscoveryNode("node1", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); List<IndicesShardStoresResponse.StoreStatus> orderedStoreStatuses = new ArrayList<>(); orderedStoreStatuses.add(new IndicesShardStoresResponse.StoreStatus(node1, ShardStateMetaData.NO_VERSION, UUIDs.randomBase64UUID(), IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY, null)); orderedStoreStatuses.add(new IndicesShardStoresResponse.StoreStatus(node1, ShardStateMetaData.NO_VERSION, UUIDs.randomBase64UUID(), IndicesShardStoresResponse.StoreStatus.AllocationStatus.REPLICA, null)); diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/shrink/TransportShrinkActionTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/shrink/TransportShrinkActionTests.java index d78374d446f..3fcade05839 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/shrink/TransportShrinkActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/shrink/TransportShrinkActionTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.action.admin.indices.shrink; import org.apache.lucene.index.IndexWriter; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.create.CreateIndexClusterStateUpdateRequest; +import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.EmptyClusterInfoService; @@ -38,7 +39,7 @@ import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.index.shard.DocsStats; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.gateway.NoopGatewayAllocator; @@ -130,6 +131,8 @@ public class TransportShrinkActionTests extends ESTestCase { int numSourceShards = clusterState.metaData().index(indexName).getNumberOfShards(); DocsStats stats = new DocsStats(randomIntBetween(0, (IndexWriter.MAX_DOCS) / numSourceShards), randomIntBetween(1, 1000)); ShrinkRequest target = new ShrinkRequest("target", indexName); + final ActiveShardCount activeShardCount = randomBoolean() ?
ActiveShardCount.ALL : ActiveShardCount.ONE; + target.setWaitForActiveShards(activeShardCount); CreateIndexClusterStateUpdateRequest request = TransportShrinkAction.prepareCreateIndexRequest( target, clusterState, (i) -> stats, new IndexNameExpressionResolver(Settings.EMPTY)); @@ -137,10 +140,11 @@ assertEquals(indexName, request.shrinkFrom().getName()); assertEquals("1", request.settings().get("index.number_of_shards")); assertEquals("shrink_index", request.cause()); + assertEquals(request.waitForActiveShards(), activeShardCount); } private DiscoveryNode newNode(String nodeId) { - return new DiscoveryNode(nodeId, DummyTransportAddress.INSTANCE, emptyMap(), + return new DiscoveryNode(nodeId, LocalTransportAddress.buildUnique(), emptyMap(), Collections.unmodifiableSet(new HashSet<>(Arrays.asList(DiscoveryNode.Role.MASTER, DiscoveryNode.Role.DATA))), Version.CURRENT); } } diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java index 726dccee597..dfc10169e70 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.admin.indices.stats; +import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.engine.CommitStats; @@ -26,6 +27,8 @@ import org.elasticsearch.index.engine.SegmentsStats; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.test.ESSingleNodeTestCase; +import java.util.List; + import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasKey; @@ -108,4 +111,12 @@ public class IndicesStatsTests extends ESSingleNodeTestCase { } } + /** + * Gives access to package private IndicesStatsResponse constructor for test purposes. + **/ + public static IndicesStatsResponse newIndicesStatsResponse(ShardStats[] shards, int totalShards, int successfulShards, + int failedShards, List<ShardOperationFailedException> shardFailures) { + return new IndicesStatsResponse(shards, totalShards, successfulShards, failedShards, shardFailures); + } + } diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java index d62fe30f6fa..8493c58729d 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.admin.indices.template.put; -import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.cluster.metadata.AliasValidator; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -161,7 +160,7 @@ public class MetaDataIndexTemplateServiceTests extends ESSingleNodeTestCase { null, new HashSet<>(), null, - null, null); + null, null, null); MetaDataIndexTemplateService service = new MetaDataIndexTemplateService(Settings.EMPTY, null, createIndexService, new AliasValidator(Settings.EMPTY), null, null); final List<Throwable> throwables = new ArrayList<>(); @@ -172,8 +171,8 @@ } @Override - public void onFailure(Throwable t) { - throwables.add(t); + public void onFailure(Exception e) { + throwables.add(e); } }); return throwables; @@ -192,6 +191,7 @@ new HashSet<>(), null, nodeServicesProvider, + null, null); MetaDataIndexTemplateService service = new MetaDataIndexTemplateService( Settings.EMPTY, clusterService, createIndexService, new AliasValidator(Settings.EMPTY), indicesService, nodeServicesProvider); @@ -205,8 +205,8 @@ } @Override - public void onFailure(Throwable t) { - throwables.add(t); + public void onFailure(Exception e) { + throwables.add(e); latch.countDown(); } }); diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BackoffPolicyTests.java b/core/src/test/java/org/elasticsearch/action/bulk/BackoffPolicyTests.java new file mode 100644 index 00000000000..e3cfeb2a4ac --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/bulk/BackoffPolicyTests.java @@ -0,0 +1,65 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.
+ */ + + package org.elasticsearch.action.bulk; + + import org.elasticsearch.common.unit.TimeValue; + import org.elasticsearch.test.ESTestCase; + + import java.util.Iterator; + import java.util.NoSuchElementException; + import java.util.concurrent.atomic.AtomicInteger; + + import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; + + public class BackoffPolicyTests extends ESTestCase { + public void testWrapBackoffPolicy() { + TimeValue timeValue = timeValueMillis(between(0, Integer.MAX_VALUE)); + int maxNumberOfRetries = between(1, 1000); + BackoffPolicy policy = BackoffPolicy.constantBackoff(timeValue, maxNumberOfRetries); + AtomicInteger retries = new AtomicInteger(); + policy = BackoffPolicy.wrap(policy, retries::getAndIncrement); + + int expectedRetries = 0; + { + // Fetching the iterator doesn't call the callback + Iterator<TimeValue> itr = policy.iterator(); + assertEquals(expectedRetries, retries.get()); + + while (itr.hasNext()) { + // hasNext doesn't trigger the callback + assertEquals(expectedRetries, retries.get()); + // next does + itr.next(); + expectedRetries += 1; + assertEquals(expectedRetries, retries.get()); + } + // next doesn't call the callback when there isn't a backoff available + expectThrows(NoSuchElementException.class, () -> itr.next()); + assertEquals(expectedRetries, retries.get()); + } + { + // The second iterator also calls the callback + Iterator<TimeValue> itr = policy.iterator(); + itr.next(); + expectedRetries += 1; + assertEquals(expectedRetries, retries.get()); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java index e5cabd417b8..6bac7c2f8a4 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkProcessorIT.java @@ -25,7 +25,6 @@ import org.elasticsearch.action.get.MultiGetRequestBuilder; import org.elasticsearch.action.get.MultiGetResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.client.Client; -import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; @@ -33,6 +32,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.transport.MockTransportClient; import java.util.Arrays; import java.util.HashSet; @@ -159,7 +159,7 @@ public class BulkProcessorIT extends ESIntegTestCase { Settings settings = Settings.builder() .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); - Client transportClient = TransportClient.builder().settings(settings).build(); + Client transportClient = new MockTransportClient(settings); int bulkActions = randomIntBetween(10, 100); int numDocs = randomIntBetween(bulkActions, bulkActions + 100); diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java index 337f881d41b..142fb282c20 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java @@ -55,9 +55,9 @@ public class BulkRequestTests extends ESTestCase { BulkRequest bulkRequest = new BulkRequest();
bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null); assertThat(bulkRequest.numberOfActions(), equalTo(3)); - assertThat(((IndexRequest) bulkRequest.requests().get(0)).source().toBytes(), equalTo(new BytesArray("{ \"field1\" : \"value1\" }").toBytes())); + assertThat(((IndexRequest) bulkRequest.requests().get(0)).source(), equalTo(new BytesArray("{ \"field1\" : \"value1\" }"))); assertThat(bulkRequest.requests().get(1), instanceOf(DeleteRequest.class)); - assertThat(((IndexRequest) bulkRequest.requests().get(2)).source().toBytes(), equalTo(new BytesArray("{ \"field1\" : \"value3\" }").toBytes())); + assertThat(((IndexRequest) bulkRequest.requests().get(2)).source(), equalTo(new BytesArray("{ \"field1\" : \"value3\" }"))); } public void testSimpleBulk2() throws Exception { @@ -81,7 +81,7 @@ public class BulkRequestTests extends ESTestCase { assertThat(bulkRequest.numberOfActions(), equalTo(4)); assertThat(((UpdateRequest) bulkRequest.requests().get(0)).id(), equalTo("1")); assertThat(((UpdateRequest) bulkRequest.requests().get(0)).retryOnConflict(), equalTo(2)); - assertThat(((UpdateRequest) bulkRequest.requests().get(0)).doc().source().toUtf8(), equalTo("{\"field\":\"value\"}")); + assertThat(((UpdateRequest) bulkRequest.requests().get(0)).doc().source().utf8ToString(), equalTo("{\"field\":\"value\"}")); assertThat(((UpdateRequest) bulkRequest.requests().get(1)).id(), equalTo("0")); assertThat(((UpdateRequest) bulkRequest.requests().get(1)).type(), equalTo("type1")); assertThat(((UpdateRequest) bulkRequest.requests().get(1)).index(), equalTo("index1")); @@ -93,7 +93,7 @@ public class BulkRequestTests extends ESTestCase { assertThat(scriptParams, notNullValue()); assertThat(scriptParams.size(), equalTo(1)); assertThat(((Integer) scriptParams.get("param1")), equalTo(1)); - assertThat(((UpdateRequest) bulkRequest.requests().get(1)).upsertRequest().source().toUtf8(), equalTo("{\"counter\":1}")); + assertThat(((UpdateRequest) bulkRequest.requests().get(1)).upsertRequest().source().utf8ToString(), equalTo("{\"counter\":1}")); } public void testBulkAllowExplicitIndex() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkShardRequestTests.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkShardRequestTests.java index b26d2531ff0..bb406366d25 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/BulkShardRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkShardRequestTests.java @@ -29,11 +29,11 @@ public class BulkShardRequestTests extends ESTestCase { public void testToString() { String index = randomSimpleString(random(), 10); int count = between(1, 100); - BulkShardRequest r = new BulkShardRequest(null, new ShardId(index, "ignored", 0), RefreshPolicy.NONE, new BulkItemRequest[count]); + BulkShardRequest r = new BulkShardRequest(new ShardId(index, "ignored", 0), RefreshPolicy.NONE, new BulkItemRequest[count]); assertEquals("BulkShardRequest to [" + index + "] containing [" + count + "] requests", r.toString()); - r = new BulkShardRequest(null, new ShardId(index, "ignored", 0), RefreshPolicy.IMMEDIATE, new BulkItemRequest[count]); + r = new BulkShardRequest(new ShardId(index, "ignored", 0), RefreshPolicy.IMMEDIATE, new BulkItemRequest[count]); assertEquals("BulkShardRequest to [" + index + "] containing [" + count + "] requests and a refresh", r.toString()); - r = new BulkShardRequest(null, new ShardId(index, "ignored", 0), RefreshPolicy.WAIT_UNTIL, new BulkItemRequest[count]); + 
r = new BulkShardRequest(new ShardId(index, "ignored", 0), RefreshPolicy.WAIT_UNTIL, new BulkItemRequest[count]); assertEquals("BulkShardRequest to [" + index + "] containing [" + count + "] requests blocking until refresh", r.toString()); } } diff --git a/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java b/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java index 6d9987394f9..4fa640b3adc 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/RetryTests.java @@ -149,7 +149,7 @@ public class RetryTests extends ESTestCase { } @Override - public void onFailure(Throwable e) { + public void onFailure(Exception e) { this.lastFailure = e; latch.countDown(); } diff --git a/core/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java b/core/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java index 6ae7559ba62..7c39adc76f6 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java @@ -59,7 +59,7 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo; public class TransportBulkActionTookTests extends ESTestCase { - static private ThreadPool threadPool; + private static ThreadPool threadPool; private ClusterService clusterService; @BeforeClass @@ -201,7 +201,7 @@ public class TransportBulkActionTookTests extends ESTestCase { } @Override - public void onFailure(Throwable e) { + public void onFailure(Exception e) { } }); diff --git a/core/src/test/java/org/elasticsearch/action/get/MultiGetShardRequestTests.java b/core/src/test/java/org/elasticsearch/action/get/MultiGetShardRequestTests.java index 451ade62584..ef259463139 100644 --- a/core/src/test/java/org/elasticsearch/action/get/MultiGetShardRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/get/MultiGetShardRequestTests.java @@ -70,7 +70,7 @@ public class MultiGetShardRequestTests extends ESTestCase { out.setVersion(randomVersion(random())); multiGetShardRequest.writeTo(out); - StreamInput in = StreamInput.wrap(out.bytes()); + StreamInput in = out.bytes().streamInput(); in.setVersion(out.getVersion()); MultiGetShardRequest multiGetShardRequest2 = new MultiGetShardRequest(); multiGetShardRequest2.readFrom(in); diff --git a/core/src/test/java/org/elasticsearch/action/ingest/BulkRequestModifierTests.java b/core/src/test/java/org/elasticsearch/action/ingest/BulkRequestModifierTests.java index 3286c07e06c..9ee5036131d 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/BulkRequestModifierTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/BulkRequestModifierTests.java @@ -111,7 +111,7 @@ public class BulkRequestModifierTests extends ESTestCase { } @Override - public void onFailure(Throwable e) { + public void onFailure(Exception e) { } }); @@ -157,7 +157,7 @@ public class BulkRequestModifierTests extends ESTestCase { } @Override - public void onFailure(Throwable e) { + public void onFailure(Exception e) { } public BulkResponse getResponse() { diff --git a/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java b/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java index a62946bf0f6..3d1a1a1c69d 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java +++ 
b/core/src/test/java/org/elasticsearch/action/ingest/IngestProxyActionFilterTests.java @@ -33,7 +33,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; @@ -78,7 +78,7 @@ public class IngestProxyActionFilterTests extends ESTestCase { if (i < ingestNodes) { roles.add(DiscoveryNode.Role.INGEST); } - DiscoveryNode node = new DiscoveryNode(nodeId, nodeId, DummyTransportAddress.INSTANCE, attributes, roles, VersionUtils.randomVersion(random())); + DiscoveryNode node = new DiscoveryNode(nodeId, nodeId, LocalTransportAddress.buildUnique(), attributes, roles, VersionUtils.randomVersion(random())); builder.put(node); if (i == totalNodes - 1) { localNode = node; diff --git a/core/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResultTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResultTests.java index 323a8c0aaa6..544e2932b44 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResultTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResultTests.java @@ -45,7 +45,7 @@ public class SimulateDocumentSimpleResultTests extends ESTestCase { BytesStreamOutput out = new BytesStreamOutput(); simulateDocumentBaseResult.writeTo(out); - StreamInput streamInput = StreamInput.wrap(out.bytes()); + StreamInput streamInput = out.bytes().streamInput(); SimulateDocumentBaseResult otherSimulateDocumentBaseResult = new SimulateDocumentBaseResult(streamInput); if (isFailure) { diff --git a/core/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java index 5806d8c312b..5b3551b24d1 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulateExecutionServiceTests.java @@ -160,7 +160,24 @@ public class SimulateExecutionServiceTests extends ESTestCase { } public void testExecuteVerboseItemExceptionWithIgnoreFailure() throws Exception { - TestProcessor testProcessor = new TestProcessor("processor_0", "mock", ingestDocument -> { throw new RuntimeException("processor failed"); }); + RuntimeException exception = new RuntimeException("processor failed"); + TestProcessor testProcessor = new TestProcessor("processor_0", "mock", ingestDocument -> { throw exception; }); + CompoundProcessor processor = new CompoundProcessor(true, Collections.singletonList(testProcessor), Collections.emptyList()); + Pipeline pipeline = new Pipeline("_id", "_description", new CompoundProcessor(processor)); + SimulateDocumentResult actualItemResponse = executionService.executeDocument(pipeline, ingestDocument, true); + assertThat(testProcessor.getInvokedCounter(), equalTo(1)); + assertThat(actualItemResponse, instanceOf(SimulateDocumentVerboseResult.class)); + SimulateDocumentVerboseResult simulateDocumentVerboseResult = (SimulateDocumentVerboseResult) actualItemResponse; + assertThat(simulateDocumentVerboseResult.getProcessorResults().size(), equalTo(1)); + 
assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getProcessorTag(), equalTo("processor_0")); + assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getFailure(), sameInstance(exception)); + assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getIngestDocument(), not(sameInstance(ingestDocument))); + assertIngestDocument(simulateDocumentVerboseResult.getProcessorResults().get(0).getIngestDocument(), ingestDocument); + assertThat(simulateDocumentVerboseResult.getProcessorResults().get(0).getIngestDocument().getSourceAndMetadata(), not(sameInstance(ingestDocument.getSourceAndMetadata()))); + } + + public void testExecuteVerboseItemWithoutExceptionAndWithIgnoreFailure() throws Exception { + TestProcessor testProcessor = new TestProcessor("processor_0", "mock", ingestDocument -> { }); CompoundProcessor processor = new CompoundProcessor(true, Collections.singletonList(testProcessor), Collections.emptyList()); Pipeline pipeline = new Pipeline("_id", "_description", new CompoundProcessor(processor)); SimulateDocumentResult actualItemResponse = executionService.executeDocument(pipeline, ingestDocument, true); diff --git a/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestParsingTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestParsingTests.java index fa4bdc6525d..8418c886be9 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestParsingTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestParsingTests.java @@ -19,19 +19,6 @@ package org.elasticsearch.action.ingest; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.ingest.PipelineStore; -import org.elasticsearch.ingest.ProcessorsRegistry; -import org.elasticsearch.ingest.TestProcessor; -import org.elasticsearch.ingest.TestTemplateService; -import org.elasticsearch.ingest.CompoundProcessor; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.Pipeline; -import org.elasticsearch.ingest.Processor; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.test.ESTestCase; -import org.junit.Before; - import java.io.IOException; import java.util.ArrayList; import java.util.Collections; @@ -40,6 +27,15 @@ import java.util.Iterator; import java.util.List; import java.util.Map; +import org.elasticsearch.ingest.CompoundProcessor; +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Pipeline; +import org.elasticsearch.ingest.PipelineStore; +import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.TestProcessor; +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + import static org.elasticsearch.action.ingest.SimulatePipelineRequest.Fields; import static org.elasticsearch.action.ingest.SimulatePipelineRequest.SIMULATED_PIPELINE_ID; import static org.elasticsearch.ingest.IngestDocument.MetaData.ID; @@ -59,12 +55,11 @@ public class SimulatePipelineRequestParsingTests extends ESTestCase { TestProcessor processor = new TestProcessor(ingestDocument -> {}); CompoundProcessor pipelineCompoundProcessor = new CompoundProcessor(processor); Pipeline pipeline = new Pipeline(SIMULATED_PIPELINE_ID, null, pipelineCompoundProcessor); - ProcessorsRegistry.Builder processorRegistryBuilder = new ProcessorsRegistry.Builder(); - processorRegistryBuilder.registerProcessor("mock_processor", ((registry) -> mock(Processor.Factory.class))); - ProcessorsRegistry 
processorRegistry = processorRegistryBuilder.build(mock(ScriptService.class), mock(ClusterService.class)); + Map registry = + Collections.singletonMap("mock_processor", (factories, tag, config) -> processor); store = mock(PipelineStore.class); when(store.get(SIMULATED_PIPELINE_ID)).thenReturn(pipeline); - when(store.getProcessorRegistry()).thenReturn(processorRegistry); + when(store.getProcessorFactories()).thenReturn(registry); } public void testParseUsingPipelineStore() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java index 1376ca4280e..576e8e01724 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java @@ -73,7 +73,7 @@ public class SimulatePipelineResponseTests extends ESTestCase { SimulatePipelineResponse response = new SimulatePipelineResponse(randomAsciiOfLengthBetween(1, 10), isVerbose, results); BytesStreamOutput out = new BytesStreamOutput(); response.writeTo(out); - StreamInput streamInput = StreamInput.wrap(out.bytes()); + StreamInput streamInput = out.bytes().streamInput(); SimulatePipelineResponse otherResponse = new SimulatePipelineResponse(); otherResponse.readFrom(streamInput); diff --git a/core/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java b/core/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java index f612f36c9d6..f6ffc035342 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java @@ -37,27 +37,39 @@ public class SimulateProcessorResultTests extends ESTestCase { public void testSerialization() throws IOException { String processorTag = randomAsciiOfLengthBetween(1, 10); - boolean isFailure = randomBoolean(); + boolean isSuccessful = randomBoolean(); + boolean isIgnoredException = randomBoolean(); SimulateProcessorResult simulateProcessorResult; - if (isFailure) { - simulateProcessorResult = new SimulateProcessorResult(processorTag, new IllegalArgumentException("test")); - } else { + if (isSuccessful) { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - simulateProcessorResult = new SimulateProcessorResult(processorTag, ingestDocument); + if (isIgnoredException) { + simulateProcessorResult = new SimulateProcessorResult(processorTag, ingestDocument, new IllegalArgumentException("test")); + } else { + simulateProcessorResult = new SimulateProcessorResult(processorTag, ingestDocument); + } + } else { + simulateProcessorResult = new SimulateProcessorResult(processorTag, new IllegalArgumentException("test")); } BytesStreamOutput out = new BytesStreamOutput(); simulateProcessorResult.writeTo(out); - StreamInput streamInput = StreamInput.wrap(out.bytes()); + StreamInput streamInput = out.bytes().streamInput(); SimulateProcessorResult otherSimulateProcessorResult = new SimulateProcessorResult(streamInput); assertThat(otherSimulateProcessorResult.getProcessorTag(), equalTo(simulateProcessorResult.getProcessorTag())); - if (isFailure) { - assertThat(simulateProcessorResult.getIngestDocument(), is(nullValue())); + if (isSuccessful) { + assertIngestDocument(otherSimulateProcessorResult.getIngestDocument(), simulateProcessorResult.getIngestDocument()); + if 
(isIgnoredException) { + assertThat(otherSimulateProcessorResult.getFailure(), instanceOf(IllegalArgumentException.class)); + IllegalArgumentException e = (IllegalArgumentException) otherSimulateProcessorResult.getFailure(); + assertThat(e.getMessage(), equalTo("test")); + } else { + assertThat(otherSimulateProcessorResult.getFailure(), nullValue()); + } + } else { + assertThat(otherSimulateProcessorResult.getIngestDocument(), is(nullValue())); assertThat(otherSimulateProcessorResult.getFailure(), instanceOf(IllegalArgumentException.class)); IllegalArgumentException e = (IllegalArgumentException) otherSimulateProcessorResult.getFailure(); assertThat(e.getMessage(), equalTo("test")); - } else { - assertIngestDocument(otherSimulateProcessorResult.getIngestDocument(), simulateProcessorResult.getIngestDocument()); } } } diff --git a/core/src/test/java/org/elasticsearch/action/ingest/TrackingResultProcessorTests.java b/core/src/test/java/org/elasticsearch/action/ingest/TrackingResultProcessorTests.java index 5b0a0599090..999cbe435f2 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/TrackingResultProcessorTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/TrackingResultProcessorTests.java @@ -39,6 +39,7 @@ import static org.elasticsearch.ingest.CompoundProcessor.ON_FAILURE_PROCESSOR_TY import static org.elasticsearch.action.ingest.TrackingResultProcessor.decorate; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.sameInstance; public class TrackingResultProcessorTests extends ESTestCase { @@ -142,7 +143,7 @@ public class TrackingResultProcessorTests extends ESTestCase { assertThat(testProcessor.getInvokedCounter(), equalTo(1)); assertThat(resultList.size(), equalTo(1)); assertThat(resultList.get(0).getIngestDocument(), equalTo(expectedResult.getIngestDocument())); - assertThat(resultList.get(0).getFailure(), nullValue()); + assertThat(resultList.get(0).getFailure(), sameInstance(exception)); assertThat(resultList.get(0).getProcessorTag(), equalTo(expectedResult.getProcessorTag())); } } diff --git a/core/src/test/java/org/elasticsearch/action/ingest/WritePipelineResponseTests.java b/core/src/test/java/org/elasticsearch/action/ingest/WritePipelineResponseTests.java index 3f252c37072..00327603ba8 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/WritePipelineResponseTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/WritePipelineResponseTests.java @@ -35,7 +35,7 @@ public class WritePipelineResponseTests extends ESTestCase { response = new WritePipelineResponse(isAcknowledged); BytesStreamOutput out = new BytesStreamOutput(); response.writeTo(out); - StreamInput streamInput = StreamInput.wrap(out.bytes()); + StreamInput streamInput = out.bytes().streamInput(); WritePipelineResponse otherResponse = new WritePipelineResponse(); otherResponse.readFrom(streamInput); @@ -46,7 +46,7 @@ public class WritePipelineResponseTests extends ESTestCase { WritePipelineResponse response = new WritePipelineResponse(); BytesStreamOutput out = new BytesStreamOutput(); response.writeTo(out); - StreamInput streamInput = StreamInput.wrap(out.bytes()); + StreamInput streamInput = out.bytes().streamInput(); WritePipelineResponse otherResponse = new WritePipelineResponse(); otherResponse.readFrom(streamInput); diff --git a/core/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java 
b/core/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java index a7ce842913d..b4908846e97 100644 --- a/core/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java +++ b/core/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java @@ -112,7 +112,7 @@ public class WriteableIngestDocumentTests extends ESTestCase { BytesStreamOutput out = new BytesStreamOutput(); writeableIngestDocument.writeTo(out); - StreamInput streamInput = StreamInput.wrap(out.bytes()); + StreamInput streamInput = out.bytes().streamInput(); WriteableIngestDocument otherWriteableIngestDocument = new WriteableIngestDocument(streamInput); assertIngestDocument(otherWriteableIngestDocument.getIngestDocument(), writeableIngestDocument.getIngestDocument()); } diff --git a/core/src/test/java/org/elasticsearch/action/main/MainActionTests.java b/core/src/test/java/org/elasticsearch/action/main/MainActionTests.java index 2bff71d3c40..a8c550e01c5 100644 --- a/core/src/test/java/org/elasticsearch/action/main/MainActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/main/MainActionTests.java @@ -64,7 +64,7 @@ public class MainActionTests extends ESTestCase { BytesStreamOutput streamOutput = new BytesStreamOutput(); mainResponse.writeTo(streamOutput); final MainResponse serialized = new MainResponse(); - serialized.readFrom(new ByteBufferStreamInput(ByteBuffer.wrap(streamOutput.bytes().toBytes()))); + serialized.readFrom(streamOutput.bytes().streamInput()); assertThat(serialized.getNodeName(), equalTo(nodeName)); assertThat(serialized.getClusterName(), equalTo(clusterName)); @@ -121,7 +121,7 @@ public class MainActionTests extends ESTestCase { } @Override - public void onFailure(Throwable e) { + public void onFailure(Exception e) { logger.error("unexpected error", e); } }); diff --git a/core/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java b/core/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java index f0f03347773..690f49171a6 100644 --- a/core/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java @@ -142,7 +142,7 @@ public class MultiSearchRequestTests extends ESTestCase { private IndicesQueriesRegistry registry() { IndicesQueriesRegistry registry = new IndicesQueriesRegistry(); QueryParser parser = MatchAllQueryBuilder::fromXContent; - registry.register(parser, MatchAllQueryBuilder.QUERY_NAME_FIELD); + registry.register(parser, MatchAllQueryBuilder.NAME); return registry; } } diff --git a/core/src/test/java/org/elasticsearch/action/search/SearchRequestBuilderTests.java b/core/src/test/java/org/elasticsearch/action/search/SearchRequestBuilderTests.java index 9cef4d46e8b..eb2f4b6904d 100644 --- a/core/src/test/java/org/elasticsearch/action/search/SearchRequestBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/action/search/SearchRequestBuilderTests.java @@ -20,12 +20,12 @@ package org.elasticsearch.action.search; import org.elasticsearch.client.Client; -import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.transport.MockTransportClient; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -41,7 +41,7 
@@ public class SearchRequestBuilderTests extends ESTestCase { Settings settings = Settings.builder() .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); - client = TransportClient.builder().settings(settings).build(); + client = new MockTransportClient(settings); } @AfterClass diff --git a/core/src/test/java/org/elasticsearch/action/search/TransportSearchIT.java b/core/src/test/java/org/elasticsearch/action/search/TransportSearchIT.java index 240b26b9287..b3c695e881d 100644 --- a/core/src/test/java/org/elasticsearch/action/search/TransportSearchIT.java +++ b/core/src/test/java/org/elasticsearch/action/search/TransportSearchIT.java @@ -37,7 +37,6 @@ public class TransportSearchIT extends ESIntegTestCase { .setSettings(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numPrimaries1)); assertAcked(prepareCreate("test2") .setSettings(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numPrimaries2)); - ensureYellow("test1", "test2"); // no exception client().prepareSearch("test1").get(); diff --git a/core/src/test/java/org/elasticsearch/action/support/ActiveShardCountTests.java b/core/src/test/java/org/elasticsearch/action/support/ActiveShardCountTests.java new file mode 100644 index 00000000000..83f0b1332c7 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/support/ActiveShardCountTests.java @@ -0,0 +1,305 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.support; + +import com.carrotsearch.hppc.cursors.ObjectCursor; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.io.stream.ByteBufferStreamInput; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.nio.ByteBuffer; + +import static org.hamcrest.Matchers.equalTo; + +/** + * Tests for the {@link ActiveShardCount} class + */ +public class ActiveShardCountTests extends ESTestCase { + + public void testFromIntValue() { + assertSame(ActiveShardCount.from(0), ActiveShardCount.NONE); + final int value = randomIntBetween(1, 50); + IndexMetaData indexMetaData = IndexMetaData.builder("test") + .settings(settings(Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0) + .build(); + assertEquals(ActiveShardCount.from(value).resolve(indexMetaData), value); + expectThrows(IllegalArgumentException.class, () -> ActiveShardCount.from(randomIntBetween(-10, -1))); + } + + public void testResolve() { + // one shard + IndexMetaData indexMetaData = IndexMetaData.builder("test") + .settings(settings(Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0) + .build(); + assertThat(ActiveShardCount.ALL.resolve(indexMetaData), equalTo(1)); + assertThat(ActiveShardCount.DEFAULT.resolve(indexMetaData), equalTo(1)); + assertThat(ActiveShardCount.NONE.resolve(indexMetaData), equalTo(0)); + final int value = randomIntBetween(2, 20); + assertThat(ActiveShardCount.from(value).resolve(indexMetaData), equalTo(value)); + + // more than one shard + final int numNewShards = randomIntBetween(1, 20); + indexMetaData = IndexMetaData.builder("test") + .settings(settings(Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(numNewShards) + .build(); + assertThat(ActiveShardCount.ALL.resolve(indexMetaData), equalTo(numNewShards + 1)); + assertThat(ActiveShardCount.DEFAULT.resolve(indexMetaData), equalTo(1)); + assertThat(ActiveShardCount.NONE.resolve(indexMetaData), equalTo(0)); + assertThat(ActiveShardCount.from(value).resolve(indexMetaData), equalTo(value)); + } + + public void testSerialization() throws IOException { + doWriteRead(ActiveShardCount.ALL); + doWriteRead(ActiveShardCount.DEFAULT); + doWriteRead(ActiveShardCount.NONE); + doWriteRead(ActiveShardCount.from(randomIntBetween(1, 50))); + } + + public void testParseString() { + assertSame(ActiveShardCount.parseString("all"), ActiveShardCount.ALL); + assertSame(ActiveShardCount.parseString(null), ActiveShardCount.DEFAULT); + assertSame(ActiveShardCount.parseString("0"), ActiveShardCount.NONE); + int value = randomIntBetween(1, 50); + assertEquals(ActiveShardCount.parseString(value + ""), ActiveShardCount.from(value)); + expectThrows(IllegalArgumentException.class, () -> ActiveShardCount.parseString(randomAsciiOfLengthBetween(4, 8))); + expectThrows(IllegalArgumentException.class, () -> ActiveShardCount.parseString("-1")); // magic numbers not exposed through API + expectThrows(IllegalArgumentException.class, () -> 
ActiveShardCount.parseString("-2")); + expectThrows(IllegalArgumentException.class, () -> ActiveShardCount.parseString(randomIntBetween(-10, -3) + "")); + } + + private void doWriteRead(ActiveShardCount activeShardCount) throws IOException { + final BytesStreamOutput out = new BytesStreamOutput(); + activeShardCount.writeTo(out); + final ByteBufferStreamInput in = new ByteBufferStreamInput(ByteBuffer.wrap(out.bytes().toBytesRef().bytes)); + ActiveShardCount readActiveShardCount = ActiveShardCount.readFrom(in); + if (activeShardCount == ActiveShardCount.DEFAULT + || activeShardCount == ActiveShardCount.ALL + || activeShardCount == ActiveShardCount.NONE) { + assertSame(activeShardCount, readActiveShardCount); + } else { + assertEquals(activeShardCount, readActiveShardCount); + } + } + + public void testEnoughShardsActiveZero() { + final String indexName = "test-idx"; + final int numberOfShards = randomIntBetween(1, 5); + final int numberOfReplicas = randomIntBetween(4, 7); + final ActiveShardCount waitForActiveShards = ActiveShardCount.from(0); + ClusterState clusterState = initializeWithNewIndex(indexName, numberOfShards, numberOfReplicas); + assertTrue(waitForActiveShards.enoughShardsActive(clusterState, indexName)); + clusterState = startPrimaries(clusterState, indexName); + assertTrue(waitForActiveShards.enoughShardsActive(clusterState, indexName)); + clusterState = startLessThanWaitOnShards(clusterState, indexName, waitForActiveShards); + assertTrue(waitForActiveShards.enoughShardsActive(clusterState, indexName)); + clusterState = startWaitOnShards(clusterState, indexName, waitForActiveShards); + assertTrue(waitForActiveShards.enoughShardsActive(clusterState, indexName)); + clusterState = startAllShards(clusterState, indexName); + assertTrue(waitForActiveShards.enoughShardsActive(clusterState, indexName)); + } + + public void testEnoughShardsActiveLevelOne() { + runTestForOneActiveShard(ActiveShardCount.ONE); + } + + public void testEnoughShardsActiveLevelDefault() { + // default is 1 + runTestForOneActiveShard(ActiveShardCount.DEFAULT); + } + + public void testEnoughShardsActiveRandom() { + final String indexName = "test-idx"; + final int numberOfShards = randomIntBetween(1, 5); + final int numberOfReplicas = randomIntBetween(4, 7); + final ActiveShardCount waitForActiveShards = ActiveShardCount.from(randomIntBetween(2, numberOfReplicas)); + ClusterState clusterState = initializeWithNewIndex(indexName, numberOfShards, numberOfReplicas); + assertFalse(waitForActiveShards.enoughShardsActive(clusterState, indexName)); + clusterState = startPrimaries(clusterState, indexName); + assertFalse(waitForActiveShards.enoughShardsActive(clusterState, indexName)); + clusterState = startLessThanWaitOnShards(clusterState, indexName, waitForActiveShards); + assertFalse(waitForActiveShards.enoughShardsActive(clusterState, indexName)); + clusterState = startWaitOnShards(clusterState, indexName, waitForActiveShards); + assertTrue(waitForActiveShards.enoughShardsActive(clusterState, indexName)); + clusterState = startAllShards(clusterState, indexName); + assertTrue(waitForActiveShards.enoughShardsActive(clusterState, indexName)); + } + + public void testEnoughShardsActiveLevelAll() { + final String indexName = "test-idx"; + final int numberOfShards = randomIntBetween(1, 5); + final int numberOfReplicas = randomIntBetween(4, 7); + // both values should represent "all" + final ActiveShardCount waitForActiveShards = randomBoolean() ? 
ActiveShardCount.from(numberOfReplicas + 1) : ActiveShardCount.ALL; + ClusterState clusterState = initializeWithNewIndex(indexName, numberOfShards, numberOfReplicas); + assertFalse(waitForActiveShards.enoughShardsActive(clusterState, indexName)); + clusterState = startPrimaries(clusterState, indexName); + assertFalse(waitForActiveShards.enoughShardsActive(clusterState, indexName)); + clusterState = startLessThanWaitOnShards(clusterState, indexName, waitForActiveShards); + assertFalse(waitForActiveShards.enoughShardsActive(clusterState, indexName)); + clusterState = startAllShards(clusterState, indexName); + assertTrue(waitForActiveShards.enoughShardsActive(clusterState, indexName)); + } + + private void runTestForOneActiveShard(final ActiveShardCount activeShardCount) { + final String indexName = "test-idx"; + final int numberOfShards = randomIntBetween(1, 5); + final int numberOfReplicas = randomIntBetween(4, 7); + assert activeShardCount == ActiveShardCount.ONE || activeShardCount == ActiveShardCount.DEFAULT; + final ActiveShardCount waitForActiveShards = activeShardCount; + ClusterState clusterState = initializeWithNewIndex(indexName, numberOfShards, numberOfReplicas); + assertFalse(waitForActiveShards.enoughShardsActive(clusterState, indexName)); + clusterState = startPrimaries(clusterState, indexName); + assertTrue(waitForActiveShards.enoughShardsActive(clusterState, indexName)); + clusterState = startLessThanWaitOnShards(clusterState, indexName, waitForActiveShards); + assertTrue(waitForActiveShards.enoughShardsActive(clusterState, indexName)); + clusterState = startWaitOnShards(clusterState, indexName, waitForActiveShards); + assertTrue(waitForActiveShards.enoughShardsActive(clusterState, indexName)); + clusterState = startAllShards(clusterState, indexName); + assertTrue(waitForActiveShards.enoughShardsActive(clusterState, indexName)); + } + + private ClusterState initializeWithNewIndex(final String indexName, final int numShards, final int numReplicas) { + // initial index creation and new routing table info + final IndexMetaData indexMetaData = IndexMetaData.builder(indexName) + .settings(settings(Version.CURRENT) + .put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID())) + .numberOfShards(numShards) + .numberOfReplicas(numReplicas) + .build(); + final MetaData metaData = MetaData.builder().put(indexMetaData, true).build(); + final RoutingTable routingTable = RoutingTable.builder().addAsNew(indexMetaData).build(); + return ClusterState.builder(new ClusterName("test_cluster")).metaData(metaData).routingTable(routingTable).build(); + } + + private ClusterState startPrimaries(final ClusterState clusterState, final String indexName) { + RoutingTable routingTable = clusterState.routingTable(); + IndexRoutingTable indexRoutingTable = routingTable.index(indexName); + IndexRoutingTable.Builder newIndexRoutingTable = IndexRoutingTable.builder(indexRoutingTable.getIndex()); + for (final ObjectCursor shardEntry : indexRoutingTable.getShards().values()) { + final IndexShardRoutingTable shardRoutingTable = shardEntry.value; + for (ShardRouting shardRouting : shardRoutingTable.getShards()) { + if (shardRouting.primary()) { + shardRouting = shardRouting.initialize(randomAsciiOfLength(8), null, shardRouting.getExpectedShardSize()) + .moveToStarted(); + } + newIndexRoutingTable.addShard(shardRouting); + } + } + routingTable = RoutingTable.builder(routingTable).add(newIndexRoutingTable).build(); + return ClusterState.builder(clusterState).routingTable(routingTable).build(); + } + + private 
ClusterState startLessThanWaitOnShards(final ClusterState clusterState, final String indexName, + final ActiveShardCount waitForActiveShards) { + RoutingTable routingTable = clusterState.routingTable(); + IndexRoutingTable indexRoutingTable = routingTable.index(indexName); + IndexRoutingTable.Builder newIndexRoutingTable = IndexRoutingTable.builder(indexRoutingTable.getIndex()); + for (final ObjectCursor shardEntry : indexRoutingTable.getShards().values()) { + final IndexShardRoutingTable shardRoutingTable = shardEntry.value; + assert shardRoutingTable.getSize() > 2; + // want less than half, and primary is already started + int numToStart = waitForActiveShards.resolve(clusterState.metaData().index(indexName)) - 2; + for (ShardRouting shardRouting : shardRoutingTable.getShards()) { + if (shardRouting.primary()) { + assertTrue(shardRouting.active()); + } else { + if (numToStart > 0) { + shardRouting = shardRouting.initialize(randomAsciiOfLength(8), null, shardRouting.getExpectedShardSize()) + .moveToStarted(); + numToStart--; + } + } + newIndexRoutingTable.addShard(shardRouting); + } + } + routingTable = RoutingTable.builder(routingTable).add(newIndexRoutingTable).build(); + return ClusterState.builder(clusterState).routingTable(routingTable).build(); + } + + private ClusterState startWaitOnShards(final ClusterState clusterState, final String indexName, + final ActiveShardCount waitForActiveShards) { + RoutingTable routingTable = clusterState.routingTable(); + IndexRoutingTable indexRoutingTable = routingTable.index(indexName); + IndexRoutingTable.Builder newIndexRoutingTable = IndexRoutingTable.builder(indexRoutingTable.getIndex()); + for (final ObjectCursor shardEntry : indexRoutingTable.getShards().values()) { + final IndexShardRoutingTable shardRoutingTable = shardEntry.value; + assert shardRoutingTable.getSize() > 2; + int numToStart = waitForActiveShards.resolve(clusterState.metaData().index(indexName)) - 1; // primary is already started + for (ShardRouting shardRouting : shardRoutingTable.getShards()) { + if (shardRouting.primary()) { + assertTrue(shardRouting.active()); + } else { + if (shardRouting.active() == false) { + if (numToStart > 0) { + shardRouting = shardRouting.initialize(randomAsciiOfLength(8), null, shardRouting.getExpectedShardSize()) + .moveToStarted(); + numToStart--; + } + } else { + numToStart--; + } + } + newIndexRoutingTable.addShard(shardRouting); + } + } + routingTable = RoutingTable.builder(routingTable).add(newIndexRoutingTable).build(); + return ClusterState.builder(clusterState).routingTable(routingTable).build(); + } + + private ClusterState startAllShards(final ClusterState clusterState, final String indexName) { + RoutingTable routingTable = clusterState.routingTable(); + IndexRoutingTable indexRoutingTable = routingTable.index(indexName); + IndexRoutingTable.Builder newIndexRoutingTable = IndexRoutingTable.builder(indexRoutingTable.getIndex()); + for (final ObjectCursor shardEntry : indexRoutingTable.getShards().values()) { + final IndexShardRoutingTable shardRoutingTable = shardEntry.value; + for (ShardRouting shardRouting : shardRoutingTable.getShards()) { + if (shardRouting.primary()) { + assertTrue(shardRouting.active()); + } else { + if (shardRouting.active() == false) { + shardRouting = shardRouting.initialize(randomAsciiOfLength(8), null, shardRouting.getExpectedShardSize()) + .moveToStarted(); + } + } + newIndexRoutingTable.addShard(shardRouting); + } + } + routingTable = RoutingTable.builder(routingTable).add(newIndexRoutingTable).build(); + 
return ClusterState.builder(clusterState).routingTable(routingTable).build(); + } + +} diff --git a/core/src/test/java/org/elasticsearch/action/support/ActiveShardsObserverIT.java b/core/src/test/java/org/elasticsearch/action/support/ActiveShardsObserverIT.java new file mode 100644 index 00000000000..ec3b5421e0e --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/support/ActiveShardsObserverIT.java @@ -0,0 +1,148 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.support; + +import org.elasticsearch.action.ListenableActionFuture; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESIntegTestCase; + +import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING; +import static org.elasticsearch.cluster.metadata.IndexMetaData.INDEX_NUMBER_OF_SHARDS_SETTING; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; + +/** + * Tests that the index creation operation waits for the appropriate + * number of active shards to be started before returning. + */ +public class ActiveShardsObserverIT extends ESIntegTestCase { + + public void testCreateIndexNoActiveShardsTimesOut() throws Exception { + Settings.Builder settingsBuilder = Settings.builder() + .put(indexSettings()) + .put(INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), randomIntBetween(1, 5)) + .put(INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 0); + if (internalCluster().getNodeNames().length > 0) { + String exclude = String.join(",", internalCluster().getNodeNames()); + settingsBuilder.put("index.routing.allocation.exclude._name", exclude); + } + Settings settings = settingsBuilder.build(); + assertFalse(prepareCreate("test-idx") + .setSettings(settings) + .setWaitForActiveShards(randomBoolean() ? 
ActiveShardCount.from(1) : ActiveShardCount.ALL) + .setTimeout("100ms") + .get() + .isShardsAcked()); + } + + public void testCreateIndexNoActiveShardsNoWaiting() throws Exception { + Settings.Builder settingsBuilder = Settings.builder() + .put(indexSettings()) + .put(INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), randomIntBetween(1, 5)) + .put(INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 0); + if (internalCluster().getNodeNames().length > 0) { + String exclude = String.join(",", internalCluster().getNodeNames()); + settingsBuilder.put("index.routing.allocation.exclude._name", exclude); + } + Settings settings = settingsBuilder.build(); + CreateIndexResponse response = prepareCreate("test-idx") + .setSettings(settings) + .setWaitForActiveShards(ActiveShardCount.from(0)) + .get(); + assertTrue(response.isAcknowledged()); + } + + public void testCreateIndexNotEnoughActiveShardsTimesOut() throws Exception { + final int numDataNodes = internalCluster().numDataNodes(); + final int numReplicas = numDataNodes + randomInt(4); + Settings settings = Settings.builder() + .put(indexSettings()) + .put(INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), randomIntBetween(1, 7)) + .put(INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), numReplicas) + .build(); + assertFalse(prepareCreate("test-idx") + .setSettings(settings) + .setWaitForActiveShards(ActiveShardCount.from(randomIntBetween(numDataNodes + 1, numReplicas + 1))) + .setTimeout("100ms") + .get() + .isShardsAcked()); + } + + public void testCreateIndexEnoughActiveShards() throws Exception { + final String indexName = "test-idx"; + Settings settings = Settings.builder() + .put(indexSettings()) + .put(INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), randomIntBetween(1, 7)) + .put(INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), internalCluster().numDataNodes() + randomIntBetween(0, 3)) + .build(); + ActiveShardCount waitForActiveShards = ActiveShardCount.from(randomIntBetween(0, internalCluster().numDataNodes())); + assertAcked(prepareCreate(indexName).setSettings(settings).setWaitForActiveShards(waitForActiveShards).get()); + } + + public void testCreateIndexWaitsForAllActiveShards() throws Exception { + // not enough data nodes, index creation times out + final int numReplicas = internalCluster().numDataNodes() + randomInt(4); + Settings settings = Settings.builder() + .put(indexSettings()) + .put(INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), randomIntBetween(1, 5)) + .put(INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), numReplicas) + .build(); + assertFalse(prepareCreate("test-idx1") + .setSettings(settings) + .setWaitForActiveShards(ActiveShardCount.ALL) + .setTimeout("100ms") + .get() + .isShardsAcked()); + + // enough data nodes, all shards are active + settings = Settings.builder() + .put(indexSettings()) + .put(INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), randomIntBetween(1, 7)) + .put(INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), internalCluster().numDataNodes() - 1) + .build(); + assertAcked(prepareCreate("test-idx2").setSettings(settings).setWaitForActiveShards(ActiveShardCount.ALL).get()); + } + + public void testCreateIndexStopsWaitingWhenIndexDeleted() throws Exception { + final String indexName = "test-idx"; + Settings settings = Settings.builder() + .put(indexSettings()) + .put(INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), randomIntBetween(1, 5)) + .put(INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), internalCluster().numDataNodes() - 1) + .build(); + + logger.info("--> start the index creation process"); + ListenableActionFuture responseListener = + prepareCreate(indexName) + .setSettings(settings) 
+ .setWaitForActiveShards(ActiveShardCount.ALL) + .execute(); + + logger.info("--> wait until the cluster state contains the new index"); + assertBusy(() -> assertTrue(client().admin().cluster().prepareState().get().getState().metaData().hasIndex(indexName))); + + logger.info("--> delete the index"); + assertAcked(client().admin().indices().prepareDelete(indexName)); + + logger.info("--> ensure the create index request completes"); + assertAcked(responseListener.get()); + } + +} diff --git a/core/src/test/java/org/elasticsearch/action/support/IndicesOptionsTests.java b/core/src/test/java/org/elasticsearch/action/support/IndicesOptionsTests.java index d5ed5302b97..d656e0f62a9 100644 --- a/core/src/test/java/org/elasticsearch/action/support/IndicesOptionsTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/IndicesOptionsTests.java @@ -38,7 +38,7 @@ public class IndicesOptionsTests extends ESTestCase { output.setVersion(outputVersion); indicesOptions.writeIndicesOptions(output); - StreamInput streamInput = StreamInput.wrap(output.bytes()); + StreamInput streamInput = output.bytes().streamInput(); streamInput.setVersion(randomVersion(random())); IndicesOptions indicesOptions2 = IndicesOptions.readIndicesOptions(streamInput); diff --git a/core/src/test/java/org/elasticsearch/action/support/ListenableActionFutureTests.java b/core/src/test/java/org/elasticsearch/action/support/ListenableActionFutureTests.java index 80492f0be61..8169a674bed 100644 --- a/core/src/test/java/org/elasticsearch/action/support/ListenableActionFutureTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/ListenableActionFutureTests.java @@ -45,15 +45,15 @@ public class ListenableActionFutureTests extends ESTestCase { } @Override - public void onFailure(Throwable e) { + public void onFailure(Exception e) { error.set(e); listenerCalled.countDown(); } }); Thread networkThread = new Thread(new AbstractRunnable() { @Override - public void onFailure(Throwable t) { - error.set(t); + public void onFailure(Exception e) { + error.set(e); listenerCalled.countDown(); } diff --git a/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java b/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java index 00068c05efe..bbf1d2f1942 100644 --- a/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java @@ -41,6 +41,8 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.Function; +import java.util.stream.IntStream; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; @@ -102,8 +104,8 @@ public class TransportActionFilterChainTests extends ESTestCase { try { assertThat(future.get(), notNullValue()); assertThat("shouldn't get here if an error is expected", errorExpected, equalTo(false)); - } catch(Throwable t) { - assertThat("shouldn't get here if an error is not expected " + t.getMessage(), errorExpected, equalTo(true)); + } catch (ExecutionException e) { + assertThat("shouldn't get here if an error is not expected " + e.getMessage(), errorExpected, equalTo(true)); } List testFiltersByLastExecution = new ArrayList<>(); @@ -182,8 +184,8 @@ public class TransportActionFilterChainTests extends ESTestCase { try { 
assertThat(future.get(), notNullValue()); assertThat("shouldn't get here if an error is expected", errorExpected, equalTo(false)); - } catch(Throwable t) { - assertThat("shouldn't get here if an error is not expected " + t.getMessage(), errorExpected, equalTo(true)); + } catch(ExecutionException e) { + assertThat("shouldn't get here if an error is not expected " + e.getMessage(), errorExpected, equalTo(true)); } List testFiltersByLastExecution = new ArrayList<>(); @@ -252,7 +254,7 @@ public class TransportActionFilterChainTests extends ESTestCase { } @Override - public void onFailure(Throwable e) { + public void onFailure(Exception e) { failures.add(e); latch.countDown(); } @@ -309,7 +311,7 @@ public class TransportActionFilterChainTests extends ESTestCase { } @Override - public void onFailure(Throwable e) { + public void onFailure(Exception e) { failures.add(e); latch.countDown(); } @@ -445,12 +447,12 @@ public class TransportActionFilterChainTests extends ESTestCase { } } - private static interface RequestCallback { + private interface RequestCallback { <Request extends ActionRequest<Request>, Response extends ActionResponse> void execute(Task task, String action, Request request, ActionListener<Response> listener, ActionFilterChain<Request, Response> actionFilterChain); } - private static interface ResponseCallback { + private interface ResponseCallback { <Response extends ActionResponse> void execute(String action, Response response, ActionListener<Response> listener, ActionFilterChain<?, Response> chain); } diff --git a/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java b/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java index 1d65f277e3c..603ad664ec3 100644 --- a/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java @@ -49,7 +49,7 @@ import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; @@ -247,7 +247,7 @@ public class TransportBroadcastByNodeActionTests extends ESTestCase { } static DiscoveryNode newNode(int nodeId) { - return new DiscoveryNode("node_" + nodeId, DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + return new DiscoveryNode("node_" + nodeId, LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); } @AfterClass @@ -491,7 +491,7 @@ public class TransportBroadcastByNodeActionTests extends ESTestCase { } @Override - public void sendResponse(Throwable error) throws IOException { + public void sendResponse(Exception exception) throws IOException { } @Override diff --git a/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java b/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java index 32fe6b1e408..9aeafcac0e4 100644 --- a/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java @@ -18,6 +18,7 @@ */ package
org.elasticsearch.action.support.master; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.ActionListener; @@ -37,7 +38,7 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.MasterNotDiscoveredException; @@ -89,9 +90,9 @@ public class TransportMasterNodeActionTests extends ESTestCase { transportService = new TransportService(clusterService.getSettings(), transport, threadPool); transportService.start(); transportService.acceptIncomingRequests(); - localNode = new DiscoveryNode("local_node", DummyTransportAddress.INSTANCE, Collections.emptyMap(), + localNode = new DiscoveryNode("local_node", LocalTransportAddress.buildUnique(), Collections.emptyMap(), Collections.singleton(DiscoveryNode.Role.MASTER), Version.CURRENT); - remoteNode = new DiscoveryNode("remote_node", DummyTransportAddress.INSTANCE, Collections.emptyMap(), + remoteNode = new DiscoveryNode("remote_node", LocalTransportAddress.buildUnique(), Collections.emptyMap(), Collections.singleton(DiscoveryNode.Role.MASTER), Version.CURRENT); allNodes = new DiscoveryNode[]{localNode, remoteNode}; } @@ -136,7 +137,7 @@ public class TransportMasterNodeActionTests extends ESTestCase { @Override protected void doExecute(Task task, final Request request, ActionListener listener) { // remove unneeded threading by wrapping listener with SAME to prevent super.doExecute from wrapping it with LISTENER - super.doExecute(task, request, new ThreadedActionListener<>(logger, threadPool, ThreadPool.Names.SAME, listener)); + super.doExecute(task, request, new ThreadedActionListener<>(logger, threadPool, ThreadPool.Names.SAME, listener, false)); } @Override @@ -167,7 +168,7 @@ public class TransportMasterNodeActionTests extends ESTestCase { Request request = new Request(); PlainActionFuture listener = new PlainActionFuture<>(); - final Throwable exception = new Throwable(); + final Exception exception = new Exception(); final Response response = new Response(); setState(clusterService, ClusterStateCreationUtils.state(localNode, localNode, allNodes)); @@ -244,7 +245,7 @@ public class TransportMasterNodeActionTests extends ESTestCase { Request request = new Request(); PlainActionFuture listener = new PlainActionFuture<>(); - setState(clusterService, ClusterStateCreationUtils.state(localNode, randomFrom(null, localNode, remoteNode), allNodes)); + setState(clusterService, ClusterStateCreationUtils.state(localNode, randomFrom(localNode, remoteNode, null), allNodes)); new Action(Settings.EMPTY, "testAction", transportService, clusterService, threadPool) { @Override @@ -317,14 +318,19 @@ public class TransportMasterNodeActionTests extends ESTestCase { assertTrue(listener.isDone()); listener.get(); } else { - Throwable t = new Throwable(); + ElasticsearchException t = new ElasticsearchException("test"); + t.addHeader("header", "is here"); transport.handleRemoteError(capturedRequest.requestId, t); assertTrue(listener.isDone()); try { listener.get(); fail("Expected exception but returned proper result"); } catch 
(ExecutionException ex) { - assertThat(ex.getCause().getCause(), equalTo(t)); + final Throwable cause = ex.getCause().getCause(); + assertThat(cause, instanceOf(ElasticsearchException.class)); + final ElasticsearchException es = (ElasticsearchException) cause; + assertThat(es.getMessage(), equalTo(t.getMessage())); + assertThat(es.getHeader("header"), equalTo(t.getHeader("header"))); } } } @@ -342,7 +348,7 @@ public class TransportMasterNodeActionTests extends ESTestCase { protected void masterOperation(Request request, ClusterState state, ActionListener listener) throws Exception { // The other node has become master, simulate failures of this node while publishing cluster state through ZenDiscovery setState(clusterService, ClusterStateCreationUtils.state(localNode, remoteNode, allNodes)); - Throwable failure = randomBoolean() + Exception failure = randomBoolean() ? new Discovery.FailedToCommitClusterStateException("Fake error") : new NotMasterException("Fake error"); listener.onFailure(failure); diff --git a/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java b/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java index a15f89bced4..ae8ea4a0b95 100644 --- a/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java @@ -32,7 +32,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.transport.CapturingTransport; import org.elasticsearch.threadpool.TestThreadPool; @@ -236,7 +236,7 @@ public class TransportNodesActionTests extends ESTestCase { private static DiscoveryNode newNode(int nodeId, Map attributes, Set roles) { String node = "node_" + nodeId; - return new DiscoveryNode(node, node, DummyTransportAddress.INSTANCE, attributes, roles, Version.CURRENT); + return new DiscoveryNode(node, node, LocalTransportAddress.buildUnique(), attributes, roles, Version.CURRENT); } private static class TestTransportNodesAction diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java b/core/src/test/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java index dc40fda3f8e..7496bb85faf 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java +++ b/core/src/test/java/org/elasticsearch/action/support/replication/ClusterStateCreationUtils.java @@ -34,7 +34,7 @@ import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.index.shard.ShardId; import java.util.Arrays; @@ -220,7 +220,6 @@ public class ClusterStateCreationUtils { * Creates a cluster state with no index */ public static ClusterState stateWithNoShard() { - int numberOfNodes = 2; DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder(); 
discoBuilder.localNodeId(newNode(0).getId()); discoBuilder.masterNodeId(newNode(1).getId()); @@ -256,11 +255,11 @@ public class ClusterStateCreationUtils { } private static DiscoveryNode newNode(int nodeId) { - return new DiscoveryNode("node_" + nodeId, DummyTransportAddress.INSTANCE, Collections.emptyMap(), + return new DiscoveryNode("node_" + nodeId, LocalTransportAddress.buildUnique(), Collections.emptyMap(), new HashSet<>(Arrays.asList(DiscoveryNode.Role.values())), Version.CURRENT); } - static private String selectAndRemove(Set strings) { + private static String selectAndRemove(Set strings) { String selection = randomFrom(strings.toArray(new String[strings.size()])); strings.remove(selection); return selection; diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java index 55e2a9d3cf2..9f41f0e37c2 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/replication/ReplicationOperationTests.java @@ -81,11 +81,11 @@ public class ReplicationOperationTests extends ESTestCase { final Set expectedReplicas = getExpectedReplicas(shardId, state); - final Map<ShardRouting, Throwable> expectedFailures = new HashMap<>(); + final Map<ShardRouting, Exception> expectedFailures = new HashMap<>(); final Set expectedFailedShards = new HashSet<>(); for (ShardRouting replica : expectedReplicas) { if (randomBoolean()) { - Throwable t; + Exception t; boolean criticalFailure = randomBoolean(); if (criticalFailure) { t = new CorruptIndexException("simulated", (String) null); @@ -166,7 +166,7 @@ public class ReplicationOperationTests extends ESTestCase { final Set expectedReplicas = getExpectedReplicas(shardId, state); - final Map<ShardRouting, Throwable> expectedFailures = new HashMap<>(); + final Map<ShardRouting, Exception> expectedFailures = new HashMap<>(); final ShardRouting failedReplica = randomFrom(new ArrayList<>(expectedReplicas)); expectedFailures.put(failedReplica, new CorruptIndexException("simulated", (String) null)); @@ -175,9 +175,9 @@ public class ReplicationOperationTests extends ESTestCase { final ClusterState finalState = state; final TestReplicaProxy replicasProxy = new TestReplicaProxy(expectedFailures) { @Override - public void failShard(ShardRouting replica, ShardRouting primary, String message, Throwable throwable, - Runnable onSuccess, Consumer<Throwable> onPrimaryDemoted, - Consumer<Throwable> onIgnoredFailure) { + public void failShard(ShardRouting replica, ShardRouting primary, String message, Exception exception, + Runnable onSuccess, Consumer<Exception> onPrimaryDemoted, + Consumer<Exception> onIgnoredFailure) { assertThat(replica, equalTo(failedReplica)); onPrimaryDemoted.accept(new ElasticsearchException("the king is dead")); } @@ -185,7 +185,7 @@ public class ReplicationOperationTests extends ESTestCase { AtomicBoolean primaryFailed = new AtomicBoolean(); final TestPrimary primary = new TestPrimary(primaryShard, primaryTerm) { @Override - public void failShard(String message, Throwable throwable) { + public void failShard(String message, Exception exception) { assertTrue(primaryFailed.compareAndSet(false, true)); } }; @@ -376,8 +376,8 @@ public class ReplicationOperationTests extends ESTestCase { } @Override - public void failShard(String message, Throwable throwable) { - throw new AssertionError("should shouldn't be failed with [" + message + "]", throwable); + public void failShard(String message, Exception exception) { + throw new AssertionError("shard
shouldn't be failed with [" + message + "]", exception); } @Override @@ -415,7 +415,7 @@ public class ReplicationOperationTests extends ESTestCase { static class TestReplicaProxy implements ReplicationOperation.Replicas { - final Map<ShardRouting, Throwable> opFailures; + final Map<ShardRouting, Exception> opFailures; final Set failedReplicas = ConcurrentCollections.newConcurrentSet(); @@ -423,7 +423,7 @@ public class ReplicationOperationTests extends ESTestCase { this(Collections.emptyMap()); } - TestReplicaProxy(Map<ShardRouting, Throwable> opFailures) { + TestReplicaProxy(Map<ShardRouting, Exception> opFailures) { this.opFailures = opFailures; } @@ -438,8 +438,8 @@ public class ReplicationOperationTests extends ESTestCase { } @Override - public void failShard(ShardRouting replica, ShardRouting primary, String message, Throwable throwable, Runnable onSuccess, - Consumer<Throwable> onPrimaryDemoted, Consumer<Throwable> onIgnoredFailure) { + public void failShard(ShardRouting replica, ShardRouting primary, String message, Exception exception, Runnable onSuccess, + Consumer<Exception> onPrimaryDemoted, Consumer<Exception> onIgnoredFailure) { if (failedReplicas.add(replica) == false) { fail("replica [" + replica + "] was failed twice"); } diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java index 229e3b6635e..de2ddabb0fe 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java @@ -390,7 +390,15 @@ public class TransportReplicationActionTests extends ESTestCase { PlainActionFuture listener = new PlainActionFuture<>(); ReplicationTask task = maybeTask(); AtomicBoolean executed = new AtomicBoolean(); - Action.PrimaryOperationTransportHandler primaryPhase = action.new PrimaryOperationTransportHandler() { + + ShardRouting primaryShard = state.getRoutingTable().shardRoutingTable(shardId).primaryShard(); + boolean executeOnPrimary = true; + // whether shard has been marked as relocated already (i.e. relocation completed) + if (primaryShard.relocating() && randomBoolean()) { + isRelocated.set(true); + executeOnPrimary = false; + } + action.new AsyncPrimaryAction(request, createTransportChannel(listener), task) { @Override protected ReplicationOperation createReplicatedOperation(Request request, ActionListener actionListener, Action.PrimaryShardReference primaryShardReference, @@ -403,15 +411,7 @@ public class TransportReplicationActionTests extends ESTestCase { } }; } - }; - ShardRouting primaryShard = state.getRoutingTable().shardRoutingTable(shardId).primaryShard(); - boolean executeOnPrimary = true; - // whether shard has been marked as relocated already (i.e.
relocation completed) - if (primaryShard.relocating() && randomBoolean()) { - isRelocated.set(true); - executeOnPrimary = false; - } - primaryPhase.messageReceived(request, createTransportChannel(listener), task); + }.run(); if (executeOnPrimary) { assertTrue(executed.get()); assertTrue(listener.isDone()); @@ -445,7 +445,7 @@ public class TransportReplicationActionTests extends ESTestCase { PlainActionFuture listener = new PlainActionFuture<>(); ReplicationTask task = maybeTask(); AtomicBoolean executed = new AtomicBoolean(); - Action.PrimaryOperationTransportHandler primaryPhase = action.new PrimaryOperationTransportHandler() { + action.new AsyncPrimaryAction(request, createTransportChannel(listener), task) { @Override protected ReplicationOperation createReplicatedOperation(Request request, ActionListener actionListener, Action.PrimaryShardReference primaryShardReference, @@ -458,8 +458,7 @@ public class TransportReplicationActionTests extends ESTestCase { } }; } - }; - primaryPhase.messageReceived(request, createTransportChannel(listener), task); + }.run(); assertThat(executed.get(), equalTo(true)); assertPhase(task, "finished"); } @@ -579,16 +578,18 @@ public class TransportReplicationActionTests extends ESTestCase { metaData.put(IndexMetaData.builder(metaData.get(index)).settings(settings)); state = ClusterState.builder(state).metaData(metaData).build(); setState(clusterService, state); - Action.PrimaryOperationTransportHandler primaryPhase = action.new PrimaryOperationTransportHandler() { + AtomicBoolean executed = new AtomicBoolean(); + action.new AsyncPrimaryAction(new Request(shardId), createTransportChannel(new PlainActionFuture<>()), null) { @Override protected ReplicationOperation createReplicatedOperation(Request request, ActionListener actionListener, Action.PrimaryShardReference primaryShardReference, boolean executeOnReplicas) { assertFalse(executeOnReplicas); + assertFalse(executed.getAndSet(true)); return new NoopReplicationOperation(request, actionListener); } - }; - primaryPhase.messageReceived(new Request(shardId), createTransportChannel(new PlainActionFuture<>()), null); + }.run(); + assertThat(executed.get(), equalTo(true)); } public void testCounterOnPrimary() throws Exception { @@ -604,17 +605,16 @@ public class TransportReplicationActionTests extends ESTestCase { final boolean throwExceptionOnCreation = i == 1; final boolean throwExceptionOnRun = i == 2; final boolean respondWithError = i == 3; - Action.PrimaryOperationTransportHandler primaryPhase = action.new PrimaryOperationTransportHandler() { - + action.new AsyncPrimaryAction(request, createTransportChannel(listener), task) { @Override protected ReplicationOperation createReplicatedOperation(Request request, - ActionListener listener, Action.PrimaryShardReference primaryShardReference, + ActionListener actionListener, Action.PrimaryShardReference primaryShardReference, boolean executeOnReplicas) { assertIndexShardCounter(1); if (throwExceptionOnCreation) { throw new ElasticsearchException("simulated exception, during createReplicatedOperation"); } - return new NoopReplicationOperation(request, listener) { + return new NoopReplicationOperation(request, actionListener) { @Override public void execute() throws Exception { assertIndexShardCounter(1); @@ -629,18 +629,7 @@ public class TransportReplicationActionTests extends ESTestCase { } }; } - }; - try { - primaryPhase.messageReceived(request, createTransportChannel(listener), task); - } catch (ElasticsearchException e) { - if (throwExceptionOnCreation || 
throwExceptionOnRun) { - assertThat(e.getMessage(), containsString("simulated")); - assertIndexShardCounter(0); - return; // early terminate - } else { - throw e; - } - } + }.run(); assertIndexShardCounter(0); assertTrue(listener.isDone()); assertPhase(task, "finished"); @@ -648,7 +637,7 @@ public class TransportReplicationActionTests extends ESTestCase { try { listener.get(); } catch (ExecutionException e) { - if (respondWithError) { + if (throwExceptionOnCreation || throwExceptionOnRun || respondWithError) { Throwable cause = e.getCause(); assertThat(cause, instanceOf(ElasticsearchException.class)); assertThat(cause.getMessage(), containsString("simulated")); @@ -787,16 +776,16 @@ public class TransportReplicationActionTests extends ESTestCase { } @Override - protected PrimaryShardReference getPrimaryShardReference(ShardId shardId) { + protected void acquirePrimaryShardReference(ShardId shardId, ActionListener onReferenceAcquired) { count.incrementAndGet(); - return new PrimaryShardReference(null, null) { + PrimaryShardReference primaryShardReference = new PrimaryShardReference(null, null) { @Override public boolean isRelocated() { return isRelocated.get(); } @Override - public void failShard(String reason, @Nullable Throwable e) { + public void failShard(String reason, @Nullable Exception e) { throw new UnsupportedOperationException(); } @@ -812,13 +801,15 @@ public class TransportReplicationActionTests extends ESTestCase { public void close() { count.decrementAndGet(); } - }; + + onReferenceAcquired.onResponse(primaryShardReference); } - protected Releasable acquireReplicaOperationLock(ShardId shardId, long primaryTerm) { + @Override + protected void acquireReplicaOperationLock(ShardId shardId, long primaryTerm, ActionListener onLockAcquired) { count.incrementAndGet(); - return count::decrementAndGet; + onLockAcquired.onResponse(count::decrementAndGet); } } @@ -865,9 +856,9 @@ public class TransportReplicationActionTests extends ESTestCase { } @Override - public void sendResponse(Throwable error) throws IOException { - consumer.accept(error); - listener.onFailure(error); + public void sendResponse(Exception exception) throws IOException { + consumer.accept(exception); + listener.onFailure(exception); } @Override diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java index 7b312959631..80e689743fd 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java @@ -179,7 +179,7 @@ public class TransportWriteActionTests extends ESTestCase { } @Override - public void onFailure(Throwable e) { + public void onFailure(Exception e) { throw new RuntimeException(e); } } diff --git a/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java b/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java index c26d376b587..37abc4d5eed 100644 --- a/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java @@ -179,9 +179,9 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase { action.new 
AsyncSingleAction(request, listener).start(); listener.get(); fail("expected ClusterBlockException"); - } catch (Throwable t) { - if (ExceptionsHelper.unwrap(t, ClusterBlockException.class) == null) { - logger.info("expected ClusterBlockException but got ", t); + } catch (Exception e) { + if (ExceptionsHelper.unwrap(e, ClusterBlockException.class) == null) { + logger.info("expected ClusterBlockException but got ", e); fail("expected ClusterBlockException"); } } @@ -317,9 +317,9 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase { assertThat(transport.capturedRequests().length, equalTo(0)); try { listener.get(); - } catch (Throwable t) { - if (ExceptionsHelper.unwrap(t, IllegalStateException.class) == null) { - logger.info("expected IllegalStateException but got ", t); + } catch (Exception e) { + if (ExceptionsHelper.unwrap(e, IllegalStateException.class) == null) { + logger.info("expected IllegalStateException but got ", e); fail("expected and IllegalStateException"); } } diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java b/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java index 208945a6179..1bba4cac3dd 100644 --- a/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java +++ b/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java @@ -68,10 +68,10 @@ import static org.hamcrest.Matchers.equalTo; public abstract class AbstractTermVectorsTestCase extends ESIntegTestCase { protected static class TestFieldSetting { - final public String name; - final public boolean storedOffset; - final public boolean storedPayloads; - final public boolean storedPositions; + public final String name; + public final boolean storedOffset; + public final boolean storedPayloads; + public final boolean storedPositions; public TestFieldSetting(String name, boolean storedOffset, boolean storedPayloads, boolean storedPositions) { this.name = name; @@ -124,9 +124,9 @@ public abstract class AbstractTermVectorsTestCase extends ESIntegTestCase { } protected static class TestDoc { - final public String id; - final public TestFieldSetting[] fieldSettings; - final public String[] fieldContent; + public final String id; + public final TestFieldSetting[] fieldSettings; + public final String[] fieldContent; public String index = "test"; public String alias = "alias"; public String type = "type1"; @@ -163,11 +163,11 @@ public abstract class AbstractTermVectorsTestCase extends ESIntegTestCase { } protected static class TestConfig { - final public TestDoc doc; - final public String[] selectedFields; - final public boolean requestPositions; - final public boolean requestOffsets; - final public boolean requestPayloads; + public final TestDoc doc; + public final String[] selectedFields; + public final boolean requestPositions; + public final boolean requestOffsets; + public final boolean requestPayloads; public Class expectedException = null; public TestConfig(TestDoc doc, String[] selectedFields, boolean requestPositions, boolean requestOffsets, boolean requestPayloads) { @@ -213,8 +213,6 @@ public abstract class AbstractTermVectorsTestCase extends ESIntegTestCase { .put("index.analysis.analyzer.tv_test.tokenizer", "standard") .putArray("index.analysis.analyzer.tv_test.filter", "type_as_payload", "lowercase"); assertAcked(prepareCreate(index).addMapping("type1", mappingBuilder).setSettings(settings).addAlias(new Alias(alias))); - - ensureYellow(); } 
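Note on the exception-handling migration running through the hunks above: callers now catch Exception instead of Throwable and probe the cause chain for the expected failure type. A minimal self-contained sketch of that unwrap idiom; the unwrap helper below is a simplified stand-in for ExceptionsHelper.unwrap, not the actual implementation:

    import java.util.concurrent.ExecutionException;

    public class UnwrapSketch {
        // Walk the cause chain and return the first throwable assignable to
        // the requested type, or null if nothing in the chain matches.
        static <T extends Throwable> T unwrap(Throwable t, Class<T> clazz) {
            for (Throwable cause = t; cause != null; cause = cause.getCause()) {
                if (clazz.isInstance(cause)) {
                    return clazz.cast(cause);
                }
            }
            return null;
        }

        public static void main(String[] args) {
            Exception e = new ExecutionException(new IllegalStateException("boom"));
            // Found via the cause chain, which is what the tests assert on:
            System.out.println(unwrap(e, IllegalStateException.class)); // the nested ISE
            // Nothing in the chain matches, so the tests would fail the assertion:
            System.out.println(unwrap(e, NumberFormatException.class)); // null
        }
    }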
/** diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsCheckDocFreqIT.java b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsCheckDocFreqIT.java index 37a1bc92e9c..1611c63d2ba 100644 --- a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsCheckDocFreqIT.java +++ b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsCheckDocFreqIT.java @@ -140,7 +140,7 @@ public class GetTermVectorsCheckDocFreqIT extends ESIntegTestCase { xBuilder.startObject(); response.toXContent(xBuilder, null); xBuilder.endObject(); - String utf8 = xBuilder.bytes().toUtf8().replaceFirst("\"took\":\\d+,", "");; + String utf8 = xBuilder.bytes().utf8ToString().replaceFirst("\"took\":\\d+,", "");; String expectedString = "{\"_index\":\"test\",\"_type\":\"type1\",\"_id\":\"" + i + "\",\"_version\":1,\"found\":true,\"term_vectors\":{\"field\":{\"terms\":{\"brown\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":2,\"start_offset\":10,\"end_offset\":15,\"payload\":\"d29yZA==\"}]},\"dog\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":8,\"start_offset\":40,\"end_offset\":43,\"payload\":\"d29yZA==\"}]},\"fox\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":3,\"start_offset\":16,\"end_offset\":19,\"payload\":\"d29yZA==\"}]},\"jumps\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":4,\"start_offset\":20,\"end_offset\":25,\"payload\":\"d29yZA==\"}]},\"lazy\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":7,\"start_offset\":35,\"end_offset\":39,\"payload\":\"d29yZA==\"}]},\"over\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":5,\"start_offset\":26,\"end_offset\":30,\"payload\":\"d29yZA==\"}]},\"quick\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":1,\"start_offset\":4,\"end_offset\":9,\"payload\":\"d29yZA==\"}]},\"the\":{\"doc_freq\":15,\"ttf\":30,\"term_freq\":2,\"tokens\":[{\"position\":0,\"start_offset\":0,\"end_offset\":3,\"payload\":\"d29yZA==\"},{\"position\":6,\"start_offset\":31,\"end_offset\":34,\"payload\":\"d29yZA==\"}]}}}}}"; @@ -196,7 +196,7 @@ public class GetTermVectorsCheckDocFreqIT extends ESIntegTestCase { xBuilder.startObject(); response.toXContent(xBuilder, null); xBuilder.endObject(); - String utf8 = xBuilder.bytes().toUtf8().replaceFirst("\"took\":\\d+,", "");; + String utf8 = xBuilder.bytes().utf8ToString().replaceFirst("\"took\":\\d+,", "");; String expectedString = "{\"_index\":\"test\",\"_type\":\"type1\",\"_id\":\"" + i + 
"\",\"_version\":1,\"found\":true,\"term_vectors\":{\"field\":{\"field_statistics\":{\"sum_doc_freq\":120,\"doc_count\":15,\"sum_ttf\":135},\"terms\":{\"brown\":{\"term_freq\":1,\"tokens\":[{\"position\":2,\"start_offset\":10,\"end_offset\":15,\"payload\":\"d29yZA==\"}]},\"dog\":{\"term_freq\":1,\"tokens\":[{\"position\":8,\"start_offset\":40,\"end_offset\":43,\"payload\":\"d29yZA==\"}]},\"fox\":{\"term_freq\":1,\"tokens\":[{\"position\":3,\"start_offset\":16,\"end_offset\":19,\"payload\":\"d29yZA==\"}]},\"jumps\":{\"term_freq\":1,\"tokens\":[{\"position\":4,\"start_offset\":20,\"end_offset\":25,\"payload\":\"d29yZA==\"}]},\"lazy\":{\"term_freq\":1,\"tokens\":[{\"position\":7,\"start_offset\":35,\"end_offset\":39,\"payload\":\"d29yZA==\"}]},\"over\":{\"term_freq\":1,\"tokens\":[{\"position\":5,\"start_offset\":26,\"end_offset\":30,\"payload\":\"d29yZA==\"}]},\"quick\":{\"term_freq\":1,\"tokens\":[{\"position\":1,\"start_offset\":4,\"end_offset\":9,\"payload\":\"d29yZA==\"}]},\"the\":{\"term_freq\":2,\"tokens\":[{\"position\":0,\"start_offset\":0,\"end_offset\":3,\"payload\":\"d29yZA==\"},{\"position\":6,\"start_offset\":31,\"end_offset\":34,\"payload\":\"d29yZA==\"}]}}}}}"; @@ -255,7 +255,7 @@ public class GetTermVectorsCheckDocFreqIT extends ESIntegTestCase { xBuilder.startObject(); response.toXContent(xBuilder, ToXContent.EMPTY_PARAMS); xBuilder.endObject(); - String utf8 = xBuilder.bytes().toUtf8().replaceFirst("\"took\":\\d+,", "");; + String utf8 = xBuilder.bytes().utf8ToString().replaceFirst("\"took\":\\d+,", "");; String expectedString = "{\"_index\":\"test\",\"_type\":\"type1\",\"_id\":\"" + i + "\",\"_version\":1,\"found\":true,\"term_vectors\":{\"field\":{\"field_statistics\":{\"sum_doc_freq\":120,\"doc_count\":15,\"sum_ttf\":135},\"terms\":{\"brown\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":2,\"start_offset\":10,\"end_offset\":15,\"payload\":\"d29yZA==\"}]},\"dog\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":8,\"start_offset\":40,\"end_offset\":43,\"payload\":\"d29yZA==\"}]},\"fox\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":3,\"start_offset\":16,\"end_offset\":19,\"payload\":\"d29yZA==\"}]},\"jumps\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":4,\"start_offset\":20,\"end_offset\":25,\"payload\":\"d29yZA==\"}]},\"lazy\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":7,\"start_offset\":35,\"end_offset\":39,\"payload\":\"d29yZA==\"}]},\"over\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":5,\"start_offset\":26,\"end_offset\":30,\"payload\":\"d29yZA==\"}]},\"quick\":{\"doc_freq\":15,\"ttf\":15,\"term_freq\":1,\"tokens\":[{\"position\":1,\"start_offset\":4,\"end_offset\":9,\"payload\":\"d29yZA==\"}]},\"the\":{\"doc_freq\":15,\"ttf\":30,\"term_freq\":2,\"tokens\":[{\"position\":0,\"start_offset\":0,\"end_offset\":3,\"payload\":\"d29yZA==\"},{\"position\":6,\"start_offset\":31,\"end_offset\":34,\"payload\":\"d29yZA==\"}]}}}}}"; diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java index 12af9f8a2c2..3835edbbe9a 100644 --- a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java +++ b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java @@ -39,7 +39,6 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import 
org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.index.mapper.FieldMapper; -import org.hamcrest.Matcher; import java.io.IOException; import java.util.ArrayList; @@ -55,7 +54,6 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -71,8 +69,6 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase { .endObject().endObject(); assertAcked(prepareCreate("test").addAlias(new Alias("alias")).addMapping("type1", mapping)); - ensureYellow(); - client().prepareIndex("test", "type1", "666").setSource("field", "foo bar").execute().actionGet(); refresh(); for (int i = 0; i < 20; i++) { @@ -97,8 +93,6 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase { .endObject().endObject(); assertAcked(prepareCreate("test").addAlias(new Alias("alias")).addMapping("type1", mapping)); - ensureYellow(); - // when indexing a field that simply has a question mark, the term vectors will be null client().prepareIndex("test", "type1", "0").setSource("existingfield", "?").execute().actionGet(); refresh(); @@ -124,8 +118,6 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase { .endObject().endObject(); assertAcked(prepareCreate("test").addAlias(new Alias("alias")).addMapping("type1", mapping)); - ensureYellow(); - // when indexing a field that simply has a question mark, the term vectors will be null client().prepareIndex("test", "type1", "0").setSource("anotherexistingfield", 1).execute().actionGet(); refresh(); @@ -154,8 +146,6 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase { "field4", "type=keyword", // yes tvs "field5", "type=text,index=true")); // yes tvs - ensureYellow(); - List indexBuilders = new ArrayList<>(); for (int i = 0; i < 6; i++) { indexBuilders.add(client().prepareIndex() @@ -200,7 +190,6 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase { .put(indexSettings()) .put("index.analysis.analyzer.tv_test.tokenizer", "whitespace") .putArray("index.analysis.analyzer.tv_test.filter", "type_as_payload", "lowercase"))); - ensureYellow(); for (int i = 0; i < 10; i++) { client().prepareIndex("test", "type1", Integer.toString(i)) .setSource(jsonBuilder().startObject().field("field", "the quick brown fox jumps over the lazy dog") @@ -286,7 +275,6 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase { .setSettings(Settings.builder() .put("index.analysis.analyzer.tv_test.tokenizer", "whitespace") .putArray("index.analysis.analyzer.tv_test.filter", "type_as_payload", "lowercase"))); - ensureYellow(); for (int i = 0; i < 10; i++) { client().prepareIndex("test", "type1", Integer.toString(i)) .setSource(jsonBuilder().startObject().field("field", "the quick brown fox jumps over the lazy dog") @@ -391,19 +379,15 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase { TestConfig[] testConfigs = generateTestConfigs(20, testDocs, testFieldSettings); for (TestConfig test : testConfigs) { - try { - TermVectorsRequestBuilder request = getRequestForConfig(test); - if (test.expectedException != null) { - assertThrows(request, test.expectedException); - continue; - } - - TermVectorsResponse response = 
request.get(); - Fields luceneTermVectors = getTermVectorsFromLucene(directoryReader, test.doc); - validateResponse(response, luceneTermVectors, test); - } catch (Throwable t) { - throw new Exception("Test exception while running " + test.toString(), t); + TermVectorsRequestBuilder request = getRequestForConfig(test); + if (test.expectedException != null) { + assertThrows(request, test.expectedException); + continue; } + + TermVectorsResponse response = request.get(); + Fields luceneTermVectors = getTermVectorsFromLucene(directoryReader, test.doc); + validateResponse(response, luceneTermVectors, test); } } @@ -436,7 +420,6 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase { .put("index.analysis.filter.my_delimited_payload_filter.delimiter", delimiter) .put("index.analysis.filter.my_delimited_payload_filter.encoding", encodingString) .put("index.analysis.filter.my_delimited_payload_filter.type", "delimited_payload_filter"))); - ensureYellow(); client().prepareIndex("test", "type1", Integer.toString(1)) .setSource(jsonBuilder().startObject().field("field", queryString).endObject()).execute().actionGet(); @@ -963,21 +946,6 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase { return randomBoolean() ? "test" : "alias"; } - private Map getFieldStatistics(Map stats, String fieldName) throws IOException { - return (Map) ((Map) stats.get(fieldName)).get("field_statistics"); - } - - private Map getTermStatistics(Map stats, String fieldName, String term) { - return (Map) ((Map) ((Map) stats.get(fieldName)).get("terms")).get(term); - } - - private Matcher equalOrLessThanTo(Integer value, boolean isEqual) { - if (isEqual) { - return equalTo(value); - } - return lessThan(value); - } - public void testTermVectorsWithVersion() { assertAcked(prepareCreate("test").addAlias(new Alias("alias")) .setSettings(Settings.builder().put("index.refresh_interval", -1))); @@ -1089,7 +1057,6 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase { assertAcked(prepareCreate("test") .setSettings(settings) .addMapping("type1", "tags", "type=text")); - ensureYellow(); int numTerms = scaledRandomIntBetween(10, 50); logger.info("Indexing one document with tags of increasing length ..."); @@ -1127,7 +1094,6 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase { assertAcked(prepareCreate("test") .setSettings(settings) .addMapping("type1", "tags", "type=text")); - ensureYellow(); logger.info("Indexing one document with tags of increasing frequencies ..."); int numTerms = scaledRandomIntBetween(10, 50); @@ -1168,7 +1134,6 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase { assertAcked(prepareCreate("test") .setSettings(settings) .addMapping("type1", "tags", "type=text")); - ensureYellow(); int numDocs = scaledRandomIntBetween(10, 50); // as many terms as there are docs logger.info("Indexing {} documents with tags of increasing dfs ...", numDocs); diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/MultiTermVectorsIT.java b/core/src/test/java/org/elasticsearch/action/termvectors/MultiTermVectorsIT.java index 57a89c82cc8..5ed4f3252d5 100644 --- a/core/src/test/java/org/elasticsearch/action/termvectors/MultiTermVectorsIT.java +++ b/core/src/test/java/org/elasticsearch/action/termvectors/MultiTermVectorsIT.java @@ -56,21 +56,16 @@ public class MultiTermVectorsIT extends AbstractTermVectorsTestCase { for (int i = 0; i < testConfigs.length; i++) { TestConfig test = testConfigs[i]; - try { - MultiTermVectorsItemResponse item = 
responseItems[i]; - if (test.expectedException != null) { - assertTrue(item.isFailed()); - continue; - } else if (item.isFailed()) { - fail(item.getFailure().getCause().getMessage()); - } - Fields luceneTermVectors = getTermVectorsFromLucene(directoryReader, test.doc); - validateResponse(item.getResponse(), luceneTermVectors, test); - } catch (Throwable t) { - throw new Exception("Test exception while running " + test.toString(), t); + MultiTermVectorsItemResponse item = responseItems[i]; + if (test.expectedException != null) { + assertTrue(item.isFailed()); + continue; + } else if (item.isFailed()) { + fail(item.getFailure().getCause().getMessage()); } + Fields luceneTermVectors = getTermVectorsFromLucene(directoryReader, test.doc); + validateResponse(item.getResponse(), luceneTermVectors, test); } - } public void testMissingIndexThrowsMissingIndex() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java b/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java index 597a2a4db39..a98433a1007 100644 --- a/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java @@ -59,6 +59,18 @@ public class UpdateRequestTests extends ESTestCase { Map params = script.getParams(); assertThat(params, nullValue()); + // simple verbose script + request.source(XContentFactory.jsonBuilder().startObject() + .startObject("script").field("inline", "script1").endObject() + .endObject()); + script = request.script(); + assertThat(script, notNullValue()); + assertThat(script.getScript(), equalTo("script1")); + assertThat(script.getType(), equalTo(ScriptType.INLINE)); + assertThat(script.getLang(), nullValue()); + params = script.getParams(); + assertThat(params, nullValue()); + // script with params request = new UpdateRequest("test", "type", "1"); request.source(XContentFactory.jsonBuilder().startObject().startObject("script").field("inline", "script1").startObject("params") @@ -135,7 +147,7 @@ public class UpdateRequestTests extends ESTestCase { TimeValue providedTTLValue = TimeValue.parseTimeValue(randomTimeValue(), null, "ttl"); Settings settings = settings(Version.CURRENT).build(); - UpdateHelper updateHelper = new UpdateHelper(settings, null, null); + UpdateHelper updateHelper = new UpdateHelper(settings, null); // We just upsert one document with ttl IndexRequest indexRequest = new IndexRequest("test", "type1", "1") diff --git a/core/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java b/core/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java index 5ac1bf40af6..28d068d7608 100644 --- a/core/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java +++ b/core/src/test/java/org/elasticsearch/aliases/IndexAliasesIT.java @@ -951,8 +951,6 @@ public class IndexAliasesIT extends ESIntegTestCase { createIndex("index1"); createIndex("index2"); - ensureYellow(); - assertAcked(admin().indices().prepareAliases().addAlias("index1", "alias1").addAlias("index2", "alias2")); GetAliasesResponse response = admin().indices().prepareGetAliases().get(); diff --git a/core/src/test/java/org/elasticsearch/blocks/SimpleBlocksIT.java b/core/src/test/java/org/elasticsearch/blocks/SimpleBlocksIT.java index ffe82f9388d..699b919cf05 100644 --- a/core/src/test/java/org/elasticsearch/blocks/SimpleBlocksIT.java +++ b/core/src/test/java/org/elasticsearch/blocks/SimpleBlocksIT.java @@ -141,15 +141,6 @@ public class SimpleBlocksIT extends ESIntegTestCase { } } 
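Aside on the UpdateRequestTests hunk above: the added block checks that the verbose object syntax for scripts parses to the same result as the terse form. A condensed sketch of what the new assertions establish, using only calls that appear in that hunk (the Script type name is assumed from context):

    // Verbose form: {"script": {"inline": "script1"}} -- parses to an INLINE
    // script with no explicit language and no params.
    UpdateRequest request = new UpdateRequest("test", "type", "1");
    request.source(XContentFactory.jsonBuilder().startObject()
            .startObject("script").field("inline", "script1").endObject()
            .endObject());
    Script script = request.script();
    assertThat(script.getType(), equalTo(ScriptType.INLINE));
    assertThat(script.getScript(), equalTo("script1"));
    assertThat(script.getLang(), nullValue());   // no "lang" key supplied
    assertThat(script.getParams(), nullValue()); // no "params" object supplied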
- private void canNotIndexExists(String index) { - try { - IndicesExistsResponse r = client().admin().indices().prepareExists(index).execute().actionGet(); - fail(); - } catch (ClusterBlockException e) { - // all is well - } - } - private void setIndexReadOnly(String index, Object value) { HashMap newSettings = new HashMap<>(); newSettings.put(IndexMetaData.SETTING_READ_ONLY, value); diff --git a/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java b/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java index 5336066b1e2..1c4cd5b4e87 100644 --- a/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java +++ b/core/src/test/java/org/elasticsearch/bootstrap/BootstrapCheckTests.java @@ -44,6 +44,7 @@ import static org.hamcrest.Matchers.not; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.reset; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; public class BootstrapCheckTests extends ESTestCase { @@ -65,6 +66,23 @@ public class BootstrapCheckTests extends ESTestCase { BootstrapCheck.check(Settings.EMPTY, boundTransportAddress); } + public void testNoLogMessageInNonProductionMode() { + final ESLogger logger = mock(ESLogger.class); + BootstrapCheck.check(false, randomBoolean(), Collections.emptyList(), logger); + verifyNoMoreInteractions(logger); + } + + public void testLogMessageInProductionMode() { + final ESLogger logger = mock(ESLogger.class); + final boolean ignoreSystemChecks = randomBoolean(); + BootstrapCheck.check(true, ignoreSystemChecks, Collections.emptyList(), logger); + verify(logger).info("bound or publishing to a non-loopback or non-link-local address, enforcing bootstrap checks"); + if (ignoreSystemChecks) { + verify(logger).warn("enforcing bootstrap checks but ignoring system bootstrap checks, consider not ignoring system checks"); + } + verifyNoMoreInteractions(logger); + } + public void testEnforceLimitsWhenBoundToNonLocalAddress() { final List transportAddresses = new ArrayList<>(); final TransportAddress nonLocalTransportAddress = mock(TransportAddress.class); @@ -545,12 +563,16 @@ public class BootstrapCheckTests extends ESTestCase { // nothing should happen if we ignore system checks BootstrapCheck.check(true, true, Collections.singletonList(check), logger); + verify(logger).info("bound or publishing to a non-loopback or non-link-local address, enforcing bootstrap checks"); + verify(logger).warn("enforcing bootstrap checks but ignoring system bootstrap checks, consider not ignoring system checks"); verify(logger).warn("error"); + verifyNoMoreInteractions(logger); reset(logger); // nothing should happen if we ignore all checks BootstrapCheck.check(false, randomBoolean(), Collections.singletonList(check), logger); verify(logger).warn("error"); + verifyNoMoreInteractions(logger); } public void testAlwaysEnforcedChecks() { diff --git a/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCliTests.java b/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCliTests.java index 8b8a4d947a9..f8bdf244999 100644 --- a/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCliTests.java +++ b/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchCliTests.java @@ -24,10 +24,12 @@ import org.elasticsearch.Version; import org.elasticsearch.cli.ExitCodes; import org.elasticsearch.monitor.jvm.JvmInfo; +import java.nio.file.Path; import java.util.function.Consumer; import static 
org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.hasEntry; public class ElasticsearchCliTests extends ESElasticsearchCliTestCase { @@ -50,7 +52,8 @@ public class ElasticsearchCliTests extends ESElasticsearchCliTestCase { ExitCodes.USAGE, output -> assertThat( output, - containsString("ERROR: Elasticsearch version option is mutually exclusive with any other option")), + allOf(containsString("ERROR:"), + containsString("are unavailable given other options on the command line"))), args); } @@ -91,18 +94,22 @@ public class ElasticsearchCliTests extends ESElasticsearchCliTestCase { } public void testThatPidFileCanBeConfigured() throws Exception { - runPidFileTest(ExitCodes.USAGE, false, output -> assertThat(output, containsString("Option p/pidfile requires an argument")), "-p"); - runPidFileTest(ExitCodes.OK, true, output -> {}, "-p", "/tmp/pid"); - runPidFileTest(ExitCodes.OK, true, output -> {}, "--pidfile", "/tmp/pid"); + Path tmpDir = createTempDir(); + Path pidFile = tmpDir.resolve("pid"); + runPidFileTest(ExitCodes.USAGE, false, + output -> assertThat(output, containsString("Option p/pidfile requires an argument")), pidFile, "-p"); + runPidFileTest(ExitCodes.OK, true, output -> {}, pidFile, "-p", pidFile.toString()); + runPidFileTest(ExitCodes.OK, true, output -> {}, pidFile, "--pidfile", tmpDir.toString() + "/pid"); } - private void runPidFileTest(final int expectedStatus, final boolean expectedInit, Consumer outputConsumer, final String... args) + private void runPidFileTest(final int expectedStatus, final boolean expectedInit, Consumer outputConsumer, + Path expectedPidFile, final String... args) throws Exception { runTest( expectedStatus, expectedInit, outputConsumer, - (foreground, pidFile, esSettings) -> assertThat(pidFile, equalTo("/tmp/pid")), + (foreground, pidFile, esSettings) -> assertThat(pidFile.toString(), equalTo(expectedPidFile.toString())), args); } diff --git a/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchUncaughtExceptionHandlerTests.java b/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchUncaughtExceptionHandlerTests.java new file mode 100644 index 00000000000..e4ff83e9b40 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/bootstrap/ElasticsearchUncaughtExceptionHandlerTests.java @@ -0,0 +1,152 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.bootstrap; + +import org.apache.lucene.index.MergePolicy; +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import java.io.IOError; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; + +import static org.hamcrest.CoreMatchers.equalTo; + +public class ElasticsearchUncaughtExceptionHandlerTests extends ESTestCase { + + private Map, Integer> expectedStatus; + + @Before + public void setUp() throws Exception { + super.setUp(); + Map, Integer> expectedStatus = new HashMap<>(); + expectedStatus.put(InternalError.class, 128); + expectedStatus.put(OutOfMemoryError.class, 127); + expectedStatus.put(StackOverflowError.class, 126); + expectedStatus.put(UnknownError.class, 125); + expectedStatus.put(IOError.class, 124); + this.expectedStatus = Collections.unmodifiableMap(expectedStatus); + } + + public void testUncaughtError() throws InterruptedException { + final Error error = randomFrom( + new InternalError(), + new OutOfMemoryError(), + new StackOverflowError(), + new UnknownError(), + new IOError(new IOException("fatal")), + new Error() {}); + final Thread thread = new Thread(() -> { throw error; }); + final String name = randomAsciiOfLength(10); + thread.setName(name); + final AtomicBoolean halt = new AtomicBoolean(); + final AtomicInteger observedStatus = new AtomicInteger(); + final AtomicReference threadNameReference = new AtomicReference<>(); + final AtomicReference throwableReference = new AtomicReference<>(); + thread.setUncaughtExceptionHandler(new ElasticsearchUncaughtExceptionHandler(() -> "testUncaughtError") { + + @Override + void halt(int status) { + halt.set(true); + observedStatus.set(status); + } + + @Override + void onFatalUncaught(String threadName, Throwable t) { + threadNameReference.set(threadName); + throwableReference.set(t); + } + + @Override + void onNonFatalUncaught(String threadName, Throwable t) { + fail(); + } + + }); + thread.start(); + thread.join(); + assertTrue(halt.get()); + final int status; + if (expectedStatus.containsKey(error.getClass())) { + status = expectedStatus.get(error.getClass()); + } else { + status = 1; + } + assertThat(observedStatus.get(), equalTo(status)); + assertThat(threadNameReference.get(), equalTo(name)); + assertThat(throwableReference.get(), equalTo(error)); + } + + public void testUncaughtException() throws InterruptedException { + final RuntimeException e = new RuntimeException("boom"); + final Thread thread = new Thread(() -> { throw e; }); + final String name = randomAsciiOfLength(10); + thread.setName(name); + final AtomicReference threadNameReference = new AtomicReference<>(); + final AtomicReference throwableReference = new AtomicReference<>(); + thread.setUncaughtExceptionHandler(new ElasticsearchUncaughtExceptionHandler(() -> "testUncaughtException") { + @Override + void halt(int status) { + fail(); + } + + @Override + void onFatalUncaught(String threadName, Throwable t) { + fail(); + } + + @Override + void onNonFatalUncaught(String threadName, Throwable t) { + threadNameReference.set(threadName); + throwableReference.set(t); + } + }); + thread.start(); + thread.join(); + assertThat(threadNameReference.get(), equalTo(name)); + assertThat(throwableReference.get(), equalTo(e)); + } + + public void testIsFatalCause() { + 
assertFatal(new MergePolicy.MergeException(new OutOfMemoryError(), null)); + assertFatal(new OutOfMemoryError()); + assertFatal(new StackOverflowError()); + assertFatal(new InternalError()); + assertFatal(new UnknownError()); + assertFatal(new IOError(new IOException())); + assertNonFatal(new RuntimeException()); + assertNonFatal(new UncheckedIOException(new IOException())); + } + + private void assertFatal(Throwable cause) { + assertTrue(ElasticsearchUncaughtExceptionHandler.isFatalUncaught(cause)); + } + + private void assertNonFatal(Throwable cause) { + assertFalse(ElasticsearchUncaughtExceptionHandler.isFatalUncaught(cause)); + } + +} diff --git a/core/src/test/java/org/elasticsearch/broadcast/BroadcastActionsIT.java b/core/src/test/java/org/elasticsearch/broadcast/BroadcastActionsIT.java index f60709d6da0..63091a97818 100644 --- a/core/src/test/java/org/elasticsearch/broadcast/BroadcastActionsIT.java +++ b/core/src/test/java/org/elasticsearch/broadcast/BroadcastActionsIT.java @@ -44,8 +44,6 @@ public class BroadcastActionsIT extends ESIntegTestCase { NumShards numShards = getNumShards("test"); logger.info("Running Cluster Health"); - ensureYellow(); - client().index(indexRequest("test").type("type1").id("1").source(source("1", "test"))).actionGet(); flush(); client().index(indexRequest("test").type("type1").id("2").source(source("2", "test"))).actionGet(); diff --git a/core/src/test/java/org/elasticsearch/bwcompat/BasicAnalysisBackwardCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/BasicAnalysisBackwardCompatibilityIT.java index 40995ff778b..f1451255b60 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/BasicAnalysisBackwardCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/BasicAnalysisBackwardCompatibilityIT.java @@ -41,7 +41,7 @@ import static org.hamcrest.Matchers.equalTo; public class BasicAnalysisBackwardCompatibilityIT extends ESBackcompatTestCase { // This pattern match characters with Line_Break = Complex_Content. 
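The new ElasticsearchUncaughtExceptionHandlerTests above pin down two behaviours: which throwables count as fatal, and which exit status each fatal error maps to. A self-contained sketch inferred from those assertions (the classification rule -- fatal if anything in the cause chain is an Error -- is an inference from assertFatal(new MergePolicy.MergeException(new OutOfMemoryError(), null)), not a quote of the production code):

    import java.io.IOError;
    import java.io.IOException;
    import java.io.UncheckedIOException;

    public class FatalCauseSketch {
        // Inferred rule: fatal when the throwable, or anything in its cause
        // chain, is a VM-level Error.
        static boolean isFatal(Throwable t) {
            for (Throwable cause = t; cause != null; cause = cause.getCause()) {
                if (cause instanceof Error) {
                    return true;
                }
            }
            return false;
        }

        // Exit-status table taken from the test's setUp map; any other Error
        // falls through to 1, matching the anonymous "new Error() {}" case.
        static int status(Error error) {
            if (error instanceof InternalError) return 128;
            if (error instanceof OutOfMemoryError) return 127;
            if (error instanceof StackOverflowError) return 126;
            if (error instanceof UnknownError) return 125;
            if (error instanceof IOError) return 124;
            return 1;
        }

        public static void main(String[] args) {
            System.out.println(isFatal(new OutOfMemoryError()));                      // true
            System.out.println(isFatal(new UncheckedIOException(new IOException()))); // false
            System.out.println(status(new IOError(new IOException("fatal"))));        // 124
        }
    }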
- final static Pattern complexUnicodeChars = Pattern.compile("[\u17B4\u17B5\u17D3\u17CB-\u17D1\u17DD\u1036\u17C6\u1A74\u1038\u17C7\u0E4E\u0E47-\u0E4D\u0EC8-\u0ECD\uAABF\uAAC1\u1037\u17C8-\u17CA\u1A75-\u1A7C\u1AA8-\u1AAB\uAADE\uAADF\u1AA0-\u1AA6\u1AAC\u1AAD\u109E\u109F\uAA77-\uAA79\u0E46\u0EC6\u17D7\u1AA7\uA9E6\uAA70\uAADD\u19DA\u0E01-\u0E3A\u0E40-\u0E45\u0EDE\u0E81\u0E82\u0E84\u0E87\u0E88\u0EAA\u0E8A\u0EDF\u0E8D\u0E94-\u0E97\u0E99-\u0E9F\u0EA1-\u0EA3\u0EA5\u0EA7\u0EAB\u0EDC\u0EDD\u0EAD-\u0EB9\u0EBB-\u0EBD\u0EC0-\u0EC4\uAA80-\uAABE\uAAC0\uAAC2\uAADB\uAADC\u1000\u1075\u1001\u1076\u1002\u1077\uAA60\uA9E9\u1003\uA9E0\uA9EA\u1004\u105A\u1005\u1078\uAA61\u1006\uA9E1\uAA62\uAA7E\u1007\uAA63\uA9EB\u1079\uAA72\u1008\u105B\uA9E2\uAA64\uA9EC\u1061\uAA7F\u1009\u107A\uAA65\uA9E7\u100A\u100B\uAA66\u100C\uAA67\u100D\uAA68\uA9ED\u100E\uAA69\uA9EE\u100F\u106E\uA9E3\uA9EF\u1010-\u1012\u107B\uA9FB\u1013\uAA6A\uA9FC\u1014\u107C\uAA6B\u105E\u1015\u1016\u107D\u107E\uAA6F\u108E\uA9E8\u1017\u107F\uA9FD\u1018\uA9E4\uA9FE\u1019\u105F\u101A\u103B\u101B\uAA73\uAA7A\u103C\u101C\u1060\u101D\u103D\u1082\u1080\u1050\u1051\u1065\u101E\u103F\uAA6C\u101F\u1081\uAA6D\u103E\uAA6E\uAA71\u1020\uA9FA\u105C\u105D\u106F\u1070\u1066\u1021-\u1026\u1052-\u1055\u1027-\u102A\u102C\u102B\u1083\u1072\u109C\u102D\u1071\u102E\u1033\u102F\u1073\u1074\u1030\u1056-\u1059\u1031\u1084\u1035\u1085\u1032\u109D\u1034\u1062\u1067\u1068\uA9E5\u1086\u1039\u103A\u1063\u1064\u1069-\u106D\u1087\u108B\u1088\u108C\u108D\u1089\u108A\u108F\u109A\u109B\uAA7B-\uAA7D\uAA74-\uAA76\u1780-\u17A2\u17DC\u17A3-\u17B3\u17B6-\u17C5\u17D2\u1950-\u196D\u1970-\u1974\u1980-\u199C\u19DE\u19DF\u199D-\u19AB\u19B0-\u19C9\u1A20-\u1A26\u1A58\u1A59\u1A27-\u1A3B\u1A5A\u1A5B\u1A3C-\u1A46\u1A54\u1A47-\u1A4C\u1A53\u1A6B\u1A55-\u1A57\u1A5C-\u1A5E\u1A4D-\u1A52\u1A61\u1A6C\u1A62-\u1A6A\u1A6E\u1A6F\u1A73\u1A70-\u1A72\u1A6D\u1A60]"); + static final Pattern complexUnicodeChars = 
Pattern.compile("[\u17B4\u17B5\u17D3\u17CB-\u17D1\u17DD\u1036\u17C6\u1A74\u1038\u17C7\u0E4E\u0E47-\u0E4D\u0EC8-\u0ECD\uAABF\uAAC1\u1037\u17C8-\u17CA\u1A75-\u1A7C\u1AA8-\u1AAB\uAADE\uAADF\u1AA0-\u1AA6\u1AAC\u1AAD\u109E\u109F\uAA77-\uAA79\u0E46\u0EC6\u17D7\u1AA7\uA9E6\uAA70\uAADD\u19DA\u0E01-\u0E3A\u0E40-\u0E45\u0EDE\u0E81\u0E82\u0E84\u0E87\u0E88\u0EAA\u0E8A\u0EDF\u0E8D\u0E94-\u0E97\u0E99-\u0E9F\u0EA1-\u0EA3\u0EA5\u0EA7\u0EAB\u0EDC\u0EDD\u0EAD-\u0EB9\u0EBB-\u0EBD\u0EC0-\u0EC4\uAA80-\uAABE\uAAC0\uAAC2\uAADB\uAADC\u1000\u1075\u1001\u1076\u1002\u1077\uAA60\uA9E9\u1003\uA9E0\uA9EA\u1004\u105A\u1005\u1078\uAA61\u1006\uA9E1\uAA62\uAA7E\u1007\uAA63\uA9EB\u1079\uAA72\u1008\u105B\uA9E2\uAA64\uA9EC\u1061\uAA7F\u1009\u107A\uAA65\uA9E7\u100A\u100B\uAA66\u100C\uAA67\u100D\uAA68\uA9ED\u100E\uAA69\uA9EE\u100F\u106E\uA9E3\uA9EF\u1010-\u1012\u107B\uA9FB\u1013\uAA6A\uA9FC\u1014\u107C\uAA6B\u105E\u1015\u1016\u107D\u107E\uAA6F\u108E\uA9E8\u1017\u107F\uA9FD\u1018\uA9E4\uA9FE\u1019\u105F\u101A\u103B\u101B\uAA73\uAA7A\u103C\u101C\u1060\u101D\u103D\u1082\u1080\u1050\u1051\u1065\u101E\u103F\uAA6C\u101F\u1081\uAA6D\u103E\uAA6E\uAA71\u1020\uA9FA\u105C\u105D\u106F\u1070\u1066\u1021-\u1026\u1052-\u1055\u1027-\u102A\u102C\u102B\u1083\u1072\u109C\u102D\u1071\u102E\u1033\u102F\u1073\u1074\u1030\u1056-\u1059\u1031\u1084\u1035\u1085\u1032\u109D\u1034\u1062\u1067\u1068\uA9E5\u1086\u1039\u103A\u1063\u1064\u1069-\u106D\u1087\u108B\u1088\u108C\u108D\u1089\u108A\u108F\u109A\u109B\uAA7B-\uAA7D\uAA74-\uAA76\u1780-\u17A2\u17DC\u17A3-\u17B3\u17B6-\u17C5\u17D2\u1950-\u196D\u1970-\u1974\u1980-\u199C\u19DE\u19DF\u199D-\u19AB\u19B0-\u19C9\u1A20-\u1A26\u1A58\u1A59\u1A27-\u1A3B\u1A5A\u1A5B\u1A3C-\u1A46\u1A54\u1A47-\u1A4C\u1A53\u1A6B\u1A55-\u1A57\u1A5C-\u1A5E\u1A4D-\u1A52\u1A61\u1A6C\u1A62-\u1A6A\u1A6E\u1A6F\u1A73\u1A70-\u1A72\u1A6D\u1A60]"); /** * Simple upgrade test for analyzers to make sure they analyze to the same tokens after upgrade @@ -59,7 +59,6 @@ public class BasicAnalysisBackwardCompatibilityIT extends ESBackcompatTestCase { assertAcked(prepareCreate("test") .addMapping("type", (Object[])fields) .setSettings(indexSettings())); - ensureYellow(); InputOutput[] inout = new InputOutput[numFields]; for (int i = 0; i < numFields; i++) { String input; diff --git a/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java index 8db5b1536e8..e6d47a63bb6 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/BasicBackwardsCompatibilityIT.java @@ -155,7 +155,6 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase { backwardsCluster().startNewNode(); } assertAcked(prepareCreate("test").setSettings(Settings.builder().put("index.routing.allocation.exclude._name", backwardsCluster().newNodePattern()).put(indexSettings()))); - ensureYellow(); assertAllShardsOnNodes("test", backwardsCluster().backwardsNodePattern()); int numDocs = randomIntBetween(100, 150); ArrayList ids = new ArrayList<>(); @@ -271,7 +270,6 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase { */ public void testIndexUpgradeSingleNode() throws Exception { assertAcked(prepareCreate("test").setSettings(Settings.builder().put("index.routing.allocation.exclude._name", backwardsCluster().newNodePattern()).put(indexSettings()))); - ensureYellow(); int numDocs = randomIntBetween(100, 150); IndexRequestBuilder[] docs = new 
IndexRequestBuilder[numDocs]; for (int i = 0; i < numDocs; i++) { @@ -403,7 +401,6 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase { for (; ; ) { indexName = "test_"+indexId++; createIndex(indexName); - ensureYellow(); indexRandom(true, client().prepareIndex(indexName, "type1", "1").setSource(jsonBuilder().startObject().startObject("obj1").field("obj1_val", "1").endObject().field("x1", "x_1").field("field1", "value1_1").field("field2", "value2_1").endObject()), client().prepareIndex(indexName, "type1", "2").setSource(jsonBuilder().startObject().startObject("obj1").field("obj1_val", "1").endObject().field("x2", "x_2").field("field1", "value1_2").endObject()), @@ -490,7 +487,6 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase { public void testIndexGetAndDelete() throws ExecutionException, InterruptedException { createIndexWithAlias(); - ensureYellow("test"); int numDocs = iterations(10, 50); for (int i = 0; i < numDocs; i++) { @@ -526,7 +522,6 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase { public void testUpdate() { createIndexWithAlias(); - ensureYellow("test"); UpdateRequestBuilder updateRequestBuilder = client().prepareUpdate(indexOrAlias(), "type1", "1") .setUpsert("field1", "value1").setDoc("field2", "value2"); @@ -557,7 +552,6 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase { public void testAnalyze() { createIndexWithAlias(); assertAcked(client().admin().indices().preparePutMapping("test").setType("test").setSource("field", "type=text,analyzer=keyword")); - ensureYellow("test"); AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze("this is a test").setIndex(indexOrAlias()).setField("field").get(); assertThat(analyzeResponse.getTokens().size(), equalTo(1)); assertThat(analyzeResponse.getTokens().get(0).getTerm(), equalTo("this is a test")); @@ -565,7 +559,6 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase { public void testExplain() { createIndexWithAlias(); - ensureYellow("test"); client().prepareIndex(indexOrAlias(), "test", "1").setSource("field", "value1").get(); refresh(); @@ -582,7 +575,6 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase { public void testGetTermVector() throws IOException { createIndexWithAlias(); assertAcked(client().admin().indices().preparePutMapping("test").setType("type1").setSource("field", "type=text,term_vector=with_positions_offsets_payloads").get()); - ensureYellow("test"); client().prepareIndex(indexOrAlias(), "type1", "1") .setSource("field", "the quick brown fox jumps over the lazy dog").get(); @@ -598,7 +590,6 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase { public void testIndicesStats() { createIndex("test"); - ensureYellow("test"); IndicesStatsResponse indicesStatsResponse = client().admin().indices().prepareStats().all().get(); assertThat(indicesStatsResponse.getIndices().size(), equalTo(1)); @@ -607,7 +598,6 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase { public void testMultiGet() throws ExecutionException, InterruptedException { createIndexWithAlias(); - ensureYellow("test"); int numDocs = iterations(10, 50); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[numDocs]; @@ -639,7 +629,6 @@ public class BasicBackwardsCompatibilityIT extends ESBackcompatTestCase { public void testScroll() throws ExecutionException, InterruptedException { createIndex("test"); - ensureYellow("test"); int 
numDocs = iterations(10, 100); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[numDocs]; diff --git a/core/src/test/java/org/elasticsearch/bwcompat/ClusterStateBackwardsCompatIT.java b/core/src/test/java/org/elasticsearch/bwcompat/ClusterStateBackwardsCompatIT.java index a4427befea2..eb6648cad02 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/ClusterStateBackwardsCompatIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/ClusterStateBackwardsCompatIT.java @@ -30,6 +30,7 @@ import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESBackcompatTestCase; +import org.elasticsearch.transport.MockTransportClient; import java.util.HashMap; import java.util.Map; @@ -101,6 +102,6 @@ public class ClusterStateBackwardsCompatIT extends ESBackcompatTestCase { private TransportClient newTransportClient() { Settings settings = Settings.builder().put("client.transport.ignore_cluster_name", true) .put("node.name", "transport_client_" + getTestName()).build(); - return TransportClient.builder().settings(settings).build(); + return new MockTransportClient(settings); } } diff --git a/core/src/test/java/org/elasticsearch/bwcompat/NodesStatsBasicBackwardsCompatIT.java b/core/src/test/java/org/elasticsearch/bwcompat/NodesStatsBasicBackwardsCompatIT.java index c9d5f0b622e..03f7f21ec55 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/NodesStatsBasicBackwardsCompatIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/NodesStatsBasicBackwardsCompatIT.java @@ -22,11 +22,11 @@ package org.elasticsearch.bwcompat; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequestBuilder; -import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESBackcompatTestCase; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.transport.MockTransportClient; import java.lang.reflect.Method; @@ -44,9 +44,9 @@ public class NodesStatsBasicBackwardsCompatIT extends ESBackcompatTestCase { // We explicitly connect to each node with a custom TransportClient for (NodeInfo n : nodesInfo.getNodes()) { - TransportClient tc = TransportClient.builder().settings(settings).build().addTransportAddress(n.getNode().getAddress()); + TransportClient tc = new MockTransportClient(settings).addTransportAddress(n.getNode().getAddress()); // Just verify that the NS can be sent and serialized/deserialized between nodes with basic indices - NodesStatsResponse ns = tc.admin().cluster().prepareNodesStats().setIndices(true).execute().actionGet(); + tc.admin().cluster().prepareNodesStats().setIndices(true).execute().actionGet(); tc.close(); } } @@ -62,7 +62,7 @@ public class NodesStatsBasicBackwardsCompatIT extends ESBackcompatTestCase { // We explicitly connect to each node with a custom TransportClient for (NodeInfo n : nodesInfo.getNodes()) { - TransportClient tc = TransportClient.builder().settings(settings).build().addTransportAddress(n.getNode().getAddress()); + TransportClient tc = new MockTransportClient(settings).addTransportAddress(n.getNode().getAddress()); // randomize the combination of flags set // Uses reflection to find methods in an 
attempt to future-proof this test against newly added flags @@ -78,7 +78,7 @@ public class NodesStatsBasicBackwardsCompatIT extends ESBackcompatTestCase { method.invoke(nsBuilder); } } - NodesStatsResponse ns = nsBuilder.execute().actionGet(); + nsBuilder.execute().actionGet(); tc.close(); } diff --git a/core/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java b/core/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java index 9fe83f65c45..429266c4589 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java @@ -18,9 +18,7 @@ */ package org.elasticsearch.bwcompat; -import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.node.Node; import static org.hamcrest.Matchers.containsString; @@ -29,7 +27,7 @@ public class RecoveryWithUnsupportedIndicesIT extends StaticIndexBackwardCompati String indexName = "unsupported-0.20.6"; logger.info("Checking static index {}", indexName); - Settings nodeSettings = prepareBackwardsDataDir(getBwcIndicesPath().resolve(indexName + ".zip"), NetworkModule.HTTP_ENABLED.getKey(), true); + Settings nodeSettings = prepareBackwardsDataDir(getBwcIndicesPath().resolve(indexName + ".zip")); try { internalCluster().startNode(nodeSettings); fail(); diff --git a/core/src/test/java/org/elasticsearch/bwcompat/RepositoryUpgradabilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/RepositoryUpgradabilityIT.java index e1873850703..c29d83b4454 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/RepositoryUpgradabilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/RepositoryUpgradabilityIT.java @@ -26,6 +26,7 @@ import org.elasticsearch.snapshots.AbstractSnapshotIntegTestCase; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.snapshots.SnapshotInfo; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.junit.annotations.TestLogging; import java.nio.file.DirectoryStream; import java.nio.file.Files; @@ -45,6 +46,8 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo; * as blob names and repository blob formats have changed between the snapshot versions. 
*/ @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST) +// this test sometimes fails in recovery when the recovery is reset, increasing the logging level to help debug +@TestLogging("indices.recovery:DEBUG") public class RepositoryUpgradabilityIT extends AbstractSnapshotIntegTestCase { /** diff --git a/core/src/test/java/org/elasticsearch/bwcompat/TransportClientBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/TransportClientBackwardsCompatibilityIT.java index b1fc1d45dca..3f0daaa3157 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/TransportClientBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/TransportClientBackwardsCompatibilityIT.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.test.CompositeTestCluster; import org.elasticsearch.test.ESBackcompatTestCase; +import org.elasticsearch.transport.MockTransportClient; import java.util.concurrent.ExecutionException; @@ -45,11 +46,10 @@ public class TransportClientBackwardsCompatibilityIT extends ESBackcompatTestCas CompositeTestCluster compositeTestCluster = backwardsCluster(); TransportAddress transportAddress = compositeTestCluster.externalTransportAddress(); - try(TransportClient client = TransportClient.builder().settings(settings).build()) { + try(TransportClient client = new MockTransportClient(settings)) { client.addTransportAddress(transportAddress); assertAcked(client.admin().indices().prepareCreate("test")); - ensureYellow("test"); int numDocs = iterations(10, 100); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[numDocs]; diff --git a/core/src/test/java/org/elasticsearch/cli/CommandTests.java b/core/src/test/java/org/elasticsearch/cli/CommandTests.java index 1f50ad4c13b..0da487e7b9d 100644 --- a/core/src/test/java/org/elasticsearch/cli/CommandTests.java +++ b/core/src/test/java/org/elasticsearch/cli/CommandTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.cli; +import joptsimple.OptionException; import joptsimple.OptionSet; import org.elasticsearch.test.ESTestCase; @@ -30,7 +31,7 @@ public class CommandTests extends ESTestCase { } @Override protected void execute(Terminal terminal, OptionSet options) throws Exception { - throw new UserError(ExitCodes.DATA_ERROR, "Bad input"); + throw new UserException(ExitCodes.DATA_ERROR, "Bad input"); } } @@ -40,7 +41,7 @@ public class CommandTests extends ESTestCase { } @Override protected void execute(Terminal terminal, OptionSet options) throws Exception { - throw new UserError(ExitCodes.USAGE, "something was no good"); + throw new UserException(ExitCodes.USAGE, "something was no good"); } } @@ -87,10 +88,11 @@ public class CommandTests extends ESTestCase { MockTerminal terminal = new MockTerminal(); NoopCommand command = new NoopCommand(); String[] args = {"-v", "-s"}; - UserError e = expectThrows(UserError.class, () -> { + OptionException e = expectThrows(OptionException.class, () -> { command.mainWithoutErrorHandling(args, terminal); }); - assertTrue(e.getMessage(), e.getMessage().contains("Cannot specify -s and -v together")); + assertTrue(e.getMessage(), + e.getMessage().contains("Option(s) [v/verbose] are unavailable given other options on the command line")); } public void testSilentVerbosity() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/cli/MultiCommandTests.java b/core/src/test/java/org/elasticsearch/cli/MultiCommandTests.java index 
4f91d378440..f4680492028 100644 --- a/core/src/test/java/org/elasticsearch/cli/MultiCommandTests.java +++ b/core/src/test/java/org/elasticsearch/cli/MultiCommandTests.java @@ -61,7 +61,7 @@ public class MultiCommandTests extends CommandTestCase { public void testUnknownCommand() throws Exception { multiCommand.subcommands.put("something", new DummySubCommand()); - UserError e = expectThrows(UserError.class, () -> { + UserException e = expectThrows(UserException.class, () -> { execute("somethingelse"); }); assertEquals(ExitCodes.USAGE, e.exitCode); @@ -70,7 +70,7 @@ public class MultiCommandTests extends CommandTestCase { public void testMissingCommand() throws Exception { multiCommand.subcommands.put("command1", new DummySubCommand()); - UserError e = expectThrows(UserError.class, () -> { + UserException e = expectThrows(UserException.class, () -> { execute(); }); assertEquals(ExitCodes.USAGE, e.exitCode); diff --git a/core/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java b/core/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java index 196b053ff82..276a43581a6 100644 --- a/core/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java +++ b/core/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java @@ -179,7 +179,7 @@ public abstract class AbstractClientHeadersTestCase extends ESTestCase { } @Override - public void onFailure(Throwable t) { + public void onFailure(Exception t) { Throwable e = unwrap(t, InternalException.class); assertThat("expected action [" + action + "] to throw an internal exception", e, notNullValue()); assertThat(action, equalTo(((InternalException) e).action)); diff --git a/core/src/test/java/org/elasticsearch/client/transport/FailAndRetryMockTransport.java b/core/src/test/java/org/elasticsearch/client/transport/FailAndRetryMockTransport.java index c085c3164a0..9d2c176dffb 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/FailAndRetryMockTransport.java +++ b/core/src/test/java/org/elasticsearch/client/transport/FailAndRetryMockTransport.java @@ -187,19 +187,13 @@ abstract class FailAndRetryMockTransport imp } @Override - public Transport start() { - return null; - } + public void start() {} @Override - public Transport stop() { - return null; - } + public void stop() {} @Override - public void close() { - - } + public void close() {} @Override public Map profileBoundAddresses() { diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java index eee74b66148..282f929ff24 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java +++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java @@ -40,6 +40,7 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ConnectTransportException; +import org.elasticsearch.transport.MockTransportClient; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportRequest; @@ -64,30 +65,27 @@ public class TransportClientHeadersTests extends AbstractClientHeadersTestCase { @Override protected Client buildClient(Settings headersSettings, GenericAction[] testedActions) { - TransportClient client = TransportClient.builder() - 
.settings(Settings.builder() + TransportClient client = new MockTransportClient(Settings.builder() .put("client.transport.sniff", false) .put("cluster.name", "cluster1") .put("node.name", "transport_client_" + this.getTestName()) .put(headersSettings) - .build()) - .addPlugin(InternalTransportService.TestPlugin.class).build(); + .build(), InternalTransportService.TestPlugin.class); client.addTransportAddress(address); return client; } public void testWithSniffing() throws Exception { - try (TransportClient client = TransportClient.builder() - .settings(Settings.builder() + try (TransportClient client = new MockTransportClient( + Settings.builder() .put("client.transport.sniff", true) .put("cluster.name", "cluster1") .put("node.name", "transport_client_" + this.getTestName() + "_1") .put("client.transport.nodes_sampler_interval", "1s") .put(HEADER_SETTINGS) - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build()) - .addPlugin(InternalTransportService.TestPlugin.class) - .build()) { + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()).build(), + InternalTransportService.TestPlugin.class)) { client.addTransportAddress(address); InternalTransportService service = (InternalTransportService) client.injector.getInstance(TransportService.class); diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java index 9cdeef2a7ff..761cc8cf0ae 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java +++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientIT.java @@ -22,14 +22,15 @@ package org.elasticsearch.client.transport; import org.elasticsearch.Version; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.env.Environment; import org.elasticsearch.node.Node; -import org.elasticsearch.node.internal.InternalSettingsPreparer; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; +import org.elasticsearch.transport.MockTransportClient; import org.elasticsearch.transport.TransportService; import java.io.IOException; @@ -51,37 +52,33 @@ public class TransportClientIT extends ESIntegTestCase { public void testNodeVersionIsUpdated() throws IOException { TransportClient client = (TransportClient) internalCluster().client(); - TransportClientNodesService nodeService = client.nodeService(); - Node node = new Node(Settings.builder() + try (Node node = new Node(Settings.builder() .put(internalCluster().getDefaultSettings()) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put("node.name", "testNodeVersionIsUpdated") - .put("http.enabled", false) + .put("transport.type", "local") + .put(NetworkModule.HTTP_ENABLED.getKey(), false) .put(Node.NODE_DATA_SETTING.getKey(), false) .put("cluster.name", "foobar") - .build()); - node.start(); - try { + .build()).start()) { TransportAddress transportAddress = node.injector().getInstance(TransportService.class).boundAddress().publishAddress(); client.addTransportAddress(transportAddress); // since we force transport clients there has to be one node started that we connect to. 
- assertThat(nodeService.connectedNodes().size(), greaterThanOrEqualTo(1)); + assertThat(client.connectedNodes().size(), greaterThanOrEqualTo(1)); // connected nodes have updated version - for (DiscoveryNode discoveryNode : nodeService.connectedNodes()) { + for (DiscoveryNode discoveryNode : client.connectedNodes()) { assertThat(discoveryNode.getVersion(), equalTo(Version.CURRENT)); } - for (DiscoveryNode discoveryNode : nodeService.listedNodes()) { + for (DiscoveryNode discoveryNode : client.listedNodes()) { assertThat(discoveryNode.getId(), startsWith("#transport#-")); assertThat(discoveryNode.getVersion(), equalTo(Version.CURRENT.minimumCompatibilityVersion())); } - assertThat(nodeService.filteredNodes().size(), equalTo(1)); - for (DiscoveryNode discoveryNode : nodeService.filteredNodes()) { + assertThat(client.filteredNodes().size(), equalTo(1)); + for (DiscoveryNode discoveryNode : client.filteredNodes()) { assertThat(discoveryNode.getVersion(), equalTo(Version.CURRENT.minimumCompatibilityVersion())); } - } finally { - node.close(); } } @@ -94,8 +91,8 @@ public class TransportClientIT extends ESIntegTestCase { public void testThatTransportClientSettingCannotBeChanged() { Settings baseSettings = Settings.builder() .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) - .build(); - try (TransportClient client = TransportClient.builder().settings(baseSettings).build()) { + .build(); + try (TransportClient client = new MockTransportClient(baseSettings)) { Settings settings = client.injector.getInstance(Settings.class); assertThat(Client.CLIENT_TYPE_SETTING_S.get(settings), is("transport")); } diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java index 5c07f5e6f25..41891c5831f 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java +++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.client.transport; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.node.liveness.LivenessResponse; import org.elasticsearch.action.admin.cluster.node.liveness.TransportLivenessAction; @@ -30,7 +29,7 @@ import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.BaseTransportResponseHandler; +import org.elasticsearch.transport.TransportResponseHandler; import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportRequestOptions; @@ -123,7 +122,7 @@ public class TransportClientNodesServiceTests extends ESTestCase { @SuppressWarnings("unchecked") public void handleResponse(T response) { LivenessResponse livenessResponse = new LivenessResponse(clusterName, - new DiscoveryNode(node.getName(), node.getId(), "liveness-hostname" + node.getId(), + new DiscoveryNode(node.getName(), node.getId(), node.getEphemeralId(), "liveness-hostname" + node.getId(), "liveness-hostaddress" + node.getId(), new LocalTransportAddress("liveness-address-" + node.getId()), node.getAttributes(), node.getRoles(), node.getVersion())); @@ -171,7 +170,7 @@ public class TransportClientNodesServiceTests extends 
ESTestCase { } @Override - public void onFailure(Throwable e) { + public void onFailure(Exception e) { finalFailures.incrementAndGet(); finalFailure.set(e); latch.countDown(); @@ -188,7 +187,7 @@ public class TransportClientNodesServiceTests extends ESTestCase { } iteration.transportService.sendRequest(node, "action", new TestRequest(), - TransportRequestOptions.EMPTY, new BaseTransportResponseHandler<TestResponse>() { + TransportRequestOptions.EMPTY, new TransportResponseHandler<TestResponse>() { @Override public TestResponse newInstance() { return new TestResponse(); diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java index ed9136851b4..ab34b38f598 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java +++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientRetryIT.java @@ -28,16 +28,15 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.env.Environment; -import org.elasticsearch.node.Node; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; +import org.elasticsearch.transport.MockTransportClient; import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.concurrent.ExecutionException; -import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @ClusterScope(scope = Scope.TEST, numClientNodes = 0, supportsDedicatedMasters = false) @@ -52,13 +51,12 @@ public class TransportClientRetryIT extends ESIntegTestCase { Settings.Builder builder = Settings.builder().put("client.transport.nodes_sampler_interval", "1s") .put("node.name", "transport_client_retry_test") - .put(Node.NODE_MODE_SETTING.getKey(), internalCluster().getNodeMode()) .put(ClusterName.CLUSTER_NAME_SETTING.getKey(), internalCluster().getClusterName()) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()); - try (TransportClient client = TransportClient.builder().settings(builder.build()).build()) { + try (TransportClient client = new MockTransportClient(builder.build())) { client.addTransportAddresses(addresses); - assertThat(client.connectedNodes().size(), equalTo(internalCluster().size())); + assertEquals(client.connectedNodes().size(), internalCluster().size()); int size = cluster().size(); //kill all nodes one by one, leaving a single master/data node at the end of the loop diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientTests.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientTests.java index ec2065b67e2..2145f66b5e0 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientTests.java +++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.client.transport; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.transport.MockTransportClient; import java.util.concurrent.ExecutionException; @@ -31,11 +32,10 @@ import static org.hamcrest.object.HasToString.hasToString; public class TransportClientTests extends ESTestCase { public void
testThatUsingAClosedClientThrowsAnException() throws ExecutionException, InterruptedException { - final TransportClient client = TransportClient.builder().settings(Settings.EMPTY).build(); + final TransportClient client = new MockTransportClient(Settings.EMPTY); client.close(); final IllegalStateException e = expectThrows(IllegalStateException.class, () -> client.admin().cluster().health(new ClusterHealthRequest()).get()); assertThat(e, hasToString(containsString("transport client is closed"))); } - } diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterChangedEventTests.java b/core/src/test/java/org/elasticsearch/cluster/ClusterChangedEventTests.java index 72748a59986..555f23813cb 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterChangedEventTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterChangedEventTests.java @@ -30,7 +30,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.index.Index; import org.elasticsearch.test.ESTestCase; @@ -320,7 +320,8 @@ public class ClusterChangedEventTests extends ESTestCase { // Create a new DiscoveryNode private static DiscoveryNode newNode(final String nodeId, Set<DiscoveryNode.Role> roles) { - return new DiscoveryNode(nodeId, nodeId, DummyTransportAddress.INSTANCE, Collections.emptyMap(), roles, Version.CURRENT); + return new DiscoveryNode(nodeId, nodeId, nodeId, "host", "host_address", new LocalTransportAddress("_test_" + nodeId), + Collections.emptyMap(), roles, Version.CURRENT); } // Create the metadata for a cluster state. diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterHealthIT.java b/core/src/test/java/org/elasticsearch/cluster/ClusterHealthIT.java index c4a3ecba839..d6cf029c00d 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterHealthIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterHealthIT.java @@ -23,10 +23,13 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.Priority; import org.elasticsearch.test.ESIntegTestCase; +import java.util.concurrent.atomic.AtomicBoolean; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; public class ClusterHealthIT extends ESIntegTestCase { + public void testSimpleLocalHealth() { createIndex("test"); ensureGreen(); // master should think it's green now.
@@ -68,4 +71,24 @@ public class ClusterHealthIT extends ESIntegTestCase { assertThat(healthResponse.getIndices().get("test1").getStatus(), equalTo(ClusterHealthStatus.GREEN)); assertThat(healthResponse.getIndices().size(), equalTo(1)); } -} \ No newline at end of file + + public void testHealthOnIndexCreation() throws Exception { + final AtomicBoolean finished = new AtomicBoolean(false); + Thread clusterHealthThread = new Thread() { + @Override + public void run() { + while (finished.get() == false) { + ClusterHealthResponse health = client().admin().cluster().prepareHealth().get(); + assertThat(health.getStatus(), not(equalTo(ClusterHealthStatus.RED))); + } + } + }; + clusterHealthThread.start(); + for (int i = 0; i < 10; i++) { + createIndex("test" + i); + } + finished.set(true); + clusterHealthThread.join(); + } + +} diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterInfoTests.java b/core/src/test/java/org/elasticsearch/cluster/ClusterInfoTests.java new file mode 100644 index 00000000000..99afee8b2c2 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterInfoTests.java @@ -0,0 +1,88 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.cluster; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.routing.RestoreSource; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.UnassignedInfo; +import org.elasticsearch.common.collect.ImmutableOpenMap; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.snapshots.Snapshot; +import org.elasticsearch.snapshots.SnapshotId; +import org.elasticsearch.test.ESTestCase; + +public class ClusterInfoTests extends ESTestCase { + + public void testSerialization() throws Exception { + ClusterInfo clusterInfo = new ClusterInfo( + randomDiskUsage(), randomDiskUsage(), randomShardSizes(), randomRoutingToDataPath() + ); + BytesStreamOutput output = new BytesStreamOutput(); + clusterInfo.writeTo(output); + + ClusterInfo result = new ClusterInfo(output.bytes().streamInput()); + assertEquals(clusterInfo.getNodeLeastAvailableDiskUsages(), result.getNodeLeastAvailableDiskUsages()); + assertEquals(clusterInfo.getNodeMostAvailableDiskUsages(), result.getNodeMostAvailableDiskUsages()); + assertEquals(clusterInfo.shardSizes, result.shardSizes); + assertEquals(clusterInfo.routingToDataPath, result.routingToDataPath); + } + + private static ImmutableOpenMap<String, DiskUsage> randomDiskUsage() { + int numEntries = randomIntBetween(0, 128); + ImmutableOpenMap.Builder<String, DiskUsage> builder = ImmutableOpenMap.builder(numEntries); + for (int i = 0; i < numEntries; i++) { + String key = randomAsciiOfLength(32); + DiskUsage diskUsage = new DiskUsage( + randomAsciiOfLength(4), randomAsciiOfLength(4), randomAsciiOfLength(4), + randomIntBetween(0, Integer.MAX_VALUE), randomIntBetween(0, Integer.MAX_VALUE) + ); + builder.put(key, diskUsage); + } + return builder.build(); + } + + private static ImmutableOpenMap<String, Long> randomShardSizes() { + int numEntries = randomIntBetween(0, 128); + ImmutableOpenMap.Builder<String, Long> builder = ImmutableOpenMap.builder(numEntries); + for (int i = 0; i < numEntries; i++) { + String key = randomAsciiOfLength(32); + long shardSize = randomIntBetween(0, Integer.MAX_VALUE); + builder.put(key, shardSize); + } + return builder.build(); + } + + private static ImmutableOpenMap<ShardRouting, String> randomRoutingToDataPath() { + int numEntries = randomIntBetween(0, 128); + ImmutableOpenMap.Builder<ShardRouting, String> builder = ImmutableOpenMap.builder(numEntries); + for (int i = 0; i < numEntries; i++) { + RestoreSource restoreSource = new RestoreSource(new Snapshot(randomAsciiOfLength(4), + new SnapshotId(randomAsciiOfLength(4), randomAsciiOfLength(4))), Version.CURRENT, randomAsciiOfLength(4)); + UnassignedInfo.Reason reason = randomFrom(UnassignedInfo.Reason.values()); + UnassignedInfo unassignedInfo = new UnassignedInfo(reason, randomAsciiOfLength(4)); + ShardId shardId = new ShardId(randomAsciiOfLength(32), randomAsciiOfLength(32), randomIntBetween(0, Integer.MAX_VALUE)); + ShardRouting shardRouting = ShardRouting.newUnassigned(shardId, restoreSource, randomBoolean(), unassignedInfo); + builder.put(shardRouting, randomAsciiOfLength(32)); + } + return builder.build(); + } + +} diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java b/core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java index b82b5e0ba60..68a0f73eb34 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterStateDiffIT.java @@ -30,6 +30,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import
org.elasticsearch.cluster.metadata.IndexTemplateMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.RepositoriesMetaData; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; @@ -52,6 +53,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.snapshots.Snapshot; +import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.test.ESIntegTestCase; import java.util.Collections; @@ -121,7 +123,7 @@ public class ClusterStateDiffIT extends ESIntegTestCase { Diff diffBeforeSerialization = clusterState.diff(previousClusterState); BytesStreamOutput os = new BytesStreamOutput(); diffBeforeSerialization.writeTo(os); - byte[] diffBytes = os.bytes().toBytes(); + byte[] diffBytes = BytesReference.toBytes(os.bytes()); Diff diff; try (StreamInput input = StreamInput.wrap(diffBytes)) { diff = previousClusterStateFromDiffs.readDiffFrom(input); @@ -190,9 +192,8 @@ public class ClusterStateDiffIT extends ESIntegTestCase { List<String> nodeIds = randomSubsetOf(randomInt(clusterState.nodes().getNodes().size() - 1), clusterState.nodes().getNodes().keys().toArray(String.class)); for (String nodeId : nodeIds) { if (nodeId.startsWith("node-")) { + nodes.remove(nodeId); if (randomBoolean()) { - nodes.remove(nodeId); - } else { nodes.put(new DiscoveryNode(nodeId, new LocalTransportAddress(randomAsciiOfLength(10)), emptyMap(), emptySet(), randomVersion(random()))); } diff --git a/core/src/test/java/org/elasticsearch/cluster/ClusterStateTests.java b/core/src/test/java/org/elasticsearch/cluster/ClusterStateTests.java index 5e272a27459..6b99e525cb2 100644 --- a/core/src/test/java/org/elasticsearch/cluster/ClusterStateTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/ClusterStateTests.java @@ -22,7 +22,7 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.test.ESTestCase; import static java.util.Collections.emptyMap; @@ -32,8 +32,9 @@ import static org.hamcrest.Matchers.equalTo; public class ClusterStateTests extends ESTestCase { public void testSupersedes() { - final DiscoveryNode node1 = new DiscoveryNode("node1", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); - final DiscoveryNode node2 = new DiscoveryNode("node2", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + final Version version = Version.CURRENT; + final DiscoveryNode node1 = new DiscoveryNode("node1", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), version); + final DiscoveryNode node2 = new DiscoveryNode("node2", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), version); final DiscoveryNodes nodes = DiscoveryNodes.builder().put(node1).put(node2).build(); ClusterName name = ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY); ClusterState noMaster1 = ClusterState.builder(name).version(randomInt(5)).nodes(nodes).build(); diff --git a/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java
b/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java index 9210e2a56bd..2073235af98 100644 --- a/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java @@ -23,8 +23,6 @@ import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.indices.stats.CommonStats; import org.elasticsearch.action.admin.indices.stats.ShardStats; -import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -33,7 +31,7 @@ import org.elasticsearch.cluster.routing.ShardRoutingHelper; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardPath; @@ -200,11 +198,11 @@ public class DiskUsageTests extends ESTestCase { new FsInfo.Path("/most", "/dev/sda", 100, 90, 80), }; List<NodeStats> nodeStats = Arrays.asList( - new NodeStats(new DiscoveryNode("node_1", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT), 0, + new NodeStats(new DiscoveryNode("node_1", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT), 0, null,null,null,null,null,new FsInfo(0, null, node1FSInfo), null,null,null,null,null, null), - new NodeStats(new DiscoveryNode("node_2", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT), 0, + new NodeStats(new DiscoveryNode("node_2", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT), 0, null,null,null,null,null, new FsInfo(0, null, node2FSInfo), null,null,null,null,null, null), - new NodeStats(new DiscoveryNode("node_3", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT), 0, + new NodeStats(new DiscoveryNode("node_3", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT), 0, null,null,null,null,null, new FsInfo(0, null, node3FSInfo), null,null,null,null,null, null) ); InternalClusterInfoService.fillDiskUsagePerNode(logger, nodeStats, newLeastAvaiableUsages, newMostAvaiableUsages); @@ -241,11 +239,11 @@ public class DiskUsageTests extends ESTestCase { new FsInfo.Path("/least", "/dev/sda", 10, -8, 0), }; List<NodeStats> nodeStats = Arrays.asList( - new NodeStats(new DiscoveryNode("node_1", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT), 0, + new NodeStats(new DiscoveryNode("node_1", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT), 0, null,null,null,null,null,new FsInfo(0, null, node1FSInfo), null,null,null,null,null, null), - new NodeStats(new DiscoveryNode("node_2", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT), 0, + new NodeStats(new DiscoveryNode("node_2", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT), 0, null,null,null,null,null, new FsInfo(0, null, node2FSInfo), null,null,null,null,null, null), - new NodeStats(new DiscoveryNode("node_3",
LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT), 0, null,null,null,null,null, new FsInfo(0, null, node3FSInfo), null,null,null,null,null, null) ); InternalClusterInfoService.fillDiskUsagePerNode(logger, nodeStats, newLeastAvailableUsages, newMostAvailableUsages); diff --git a/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java b/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java index 61bb898acc2..aad2aa212a1 100644 --- a/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java @@ -395,8 +395,8 @@ public class MinimumMasterNodesIT extends ESIntegTestCase { } @Override - public void onFailure(String source, Throwable t) { - failure.set(t); + public void onFailure(String source, Exception e) { + failure.set(e); latch.countDown(); } }); diff --git a/core/src/test/java/org/elasticsearch/cluster/NodeConnectionsServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/NodeConnectionsServiceTests.java index 5eb5a34c44f..b0bc3ee2e4e 100644 --- a/core/src/test/java/org/elasticsearch/cluster/NodeConnectionsServiceTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/NodeConnectionsServiceTests.java @@ -26,7 +26,7 @@ import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.component.LifecycleListener; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.test.ESTestCase; @@ -64,7 +64,7 @@ public class NodeConnectionsServiceTests extends ESTestCase { List<DiscoveryNode> nodes = new ArrayList<>(); for (int i = randomIntBetween(20, 50); i > 0; i--) { Set<DiscoveryNode.Role> roles = new HashSet<>(randomSubsetOf(Arrays.asList(DiscoveryNode.Role.values()))); - nodes.add(new DiscoveryNode("node_" + i, "" + i, DummyTransportAddress.INSTANCE, Collections.emptyMap(), + nodes.add(new DiscoveryNode("node_" + i, "" + i, LocalTransportAddress.buildUnique(), Collections.emptyMap(), roles, Version.CURRENT)); } return nodes; @@ -253,18 +253,12 @@ public class NodeConnectionsServiceTests extends ESTestCase { } @Override - public Transport start() { - return null; - } + public void start() {} @Override - public Transport stop() { - return null; - } + public void stop() {} @Override - public void close() { - - } + public void close() {} } } diff --git a/core/src/test/java/org/elasticsearch/cluster/SimpleDataNodesIT.java b/core/src/test/java/org/elasticsearch/cluster/SimpleDataNodesIT.java index 43aa088a3b9..f411e00468e 100644 --- a/core/src/test/java/org/elasticsearch/cluster/SimpleDataNodesIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/SimpleDataNodesIT.java @@ -21,6 +21,7 @@ package org.elasticsearch.cluster; import org.elasticsearch.action.UnavailableShardsException; import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.Requests; import org.elasticsearch.common.Priority; import org.elasticsearch.common.settings.Settings; @@ -40,7 +41,7 @@ import static org.hamcrest.Matchers.equalTo; public class SimpleDataNodesIT extends ESIntegTestCase { public void testDataNodes() throws Exception {
internalCluster().startNode(Settings.builder().put(Node.NODE_DATA_SETTING.getKey(), false).build()); - client().admin().indices().create(createIndexRequest("test")).actionGet(); + client().admin().indices().create(createIndexRequest("test").waitForActiveShards(ActiveShardCount.NONE)).actionGet(); try { client().index(Requests.indexRequest("test").type("type1").id("1").source(source("1", "test")).timeout(timeValueSeconds(1))).actionGet(); fail("no allocation should happen"); diff --git a/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java index 1faac874114..d12b6b563b3 100644 --- a/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java @@ -27,7 +27,6 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateTaskExecutor; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.cluster.node.DiscoveryNodeService; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; @@ -41,6 +40,7 @@ import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.FailedRerouteAllocation; import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; +import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; @@ -305,7 +305,8 @@ public class ShardFailedClusterStateTaskExecutorTests extends ESAllocationTestCa return randomSubsetOf(1, shards.toArray(new ShardRouting[0])).get(0); } else { return - TestShardRouting.newShardRouting(shardRouting.shardId(), DiscoveryNodeService.generateNodeId(Settings.EMPTY), randomBoolean(), randomFrom(ShardRoutingState.values())); + TestShardRouting.newShardRouting(shardRouting.shardId(), UUIDs.randomBase64UUID(random()), randomBoolean(), + randomFrom(ShardRoutingState.values())); } } diff --git a/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java index 7f0ac1a6e45..d387d6f7d43 100644 --- a/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java @@ -148,7 +148,7 @@ public class ShardStateActionTests extends ESTestCase { } @Override - public void onFailure(Throwable t) { + public void onFailure(Exception e) { success.set(false); latch.countDown(); assert false; @@ -196,7 +196,7 @@ public class ShardStateActionTests extends ESTestCase { } @Override - public void onFailure(Throwable e) { + public void onFailure(Exception e) { success.set(false); latch.countDown(); assert false; @@ -245,9 +245,9 @@ public class ShardStateActionTests extends ESTestCase { } @Override - public void onFailure(Throwable t) { + public void onFailure(Exception e) { success.set(false); - throwable.set(t); + throwable.set(e); latch.countDown(); assert 
false; } @@ -281,7 +281,7 @@ public class ShardStateActionTests extends ESTestCase { } @Override - public void onFailure(Throwable t) { + public void onFailure(Exception e) { failure.set(true); } }); @@ -313,7 +313,7 @@ public class ShardStateActionTests extends ESTestCase { } @Override - public void onFailure(Throwable t) { + public void onFailure(Exception e) { success.set(false); latch.countDown(); assert false; @@ -348,8 +348,8 @@ public class ShardStateActionTests extends ESTestCase { } @Override - public void onFailure(Throwable t) { - failure.set(t); + public void onFailure(Exception e) { + failure.set(e); latch.countDown(); } }); @@ -401,7 +401,7 @@ public class ShardStateActionTests extends ESTestCase { } } - private Throwable getSimulatedFailure() { + private Exception getSimulatedFailure() { return new CorruptIndexException("simulated", (String) null); } } diff --git a/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java b/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java index c86535e40c5..70af5808241 100644 --- a/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/allocation/ClusterRerouteIT.java @@ -22,6 +22,7 @@ package org.elasticsearch.cluster.allocation; import org.apache.lucene.util.IOUtils; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteResponse; +import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.health.ClusterHealthStatus; @@ -91,7 +92,7 @@ public class ClusterRerouteIT extends ESIntegTestCase { final String node_2 = nodesIds.get(1); logger.info("--> create an index with 1 shard, 1 replica, nothing should allocate"); - client().admin().indices().prepareCreate("test") + client().admin().indices().prepareCreate("test").setWaitForActiveShards(ActiveShardCount.NONE) .setSettings(Settings.builder().put("index.number_of_shards", 1)) .execute().actionGet(); @@ -203,7 +204,7 @@ public class ClusterRerouteIT extends ESIntegTestCase { assertThat(healthResponse.isTimedOut(), equalTo(false)); logger.info("--> create an index with 1 shard, 1 replica, nothing should allocate"); - client().admin().indices().prepareCreate("test") + client().admin().indices().prepareCreate("test").setWaitForActiveShards(ActiveShardCount.NONE) .setSettings(Settings.builder().put("index.number_of_shards", 1)) .execute().actionGet(); @@ -253,14 +254,13 @@ public class ClusterRerouteIT extends ESIntegTestCase { assertThat(state.getRoutingNodes().unassigned().size(), equalTo(1)); assertThat(state.getRoutingNodes().node(state.nodes().resolveNode(node_1).getId()).iterator().next().state(), equalTo(ShardRoutingState.INITIALIZING)); - healthResponse = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForYellowStatus().execute().actionGet(); - assertThat(healthResponse.isTimedOut(), equalTo(false)); - logger.info("--> get the state, verify shard 1 primary allocated"); - state = client().admin().cluster().prepareState().execute().actionGet().getState(); - assertThat(state.getRoutingNodes().unassigned().size(), equalTo(1)); - assertThat(state.getRoutingNodes().node(state.nodes().resolveNode(node_1).getId()).iterator().next().state(), equalTo(ShardRoutingState.STARTED)); - + final String nodeToCheck = 
node_1; + assertBusy(() -> { + ClusterState clusterState = client().admin().cluster().prepareState().execute().actionGet().getState(); + String nodeId = clusterState.nodes().resolveNode(nodeToCheck).getId(); + assertThat(clusterState.getRoutingNodes().node(nodeId).iterator().next().state(), equalTo(ShardRoutingState.STARTED)); + }); } public void testRerouteExplain() { diff --git a/core/src/test/java/org/elasticsearch/cluster/block/ClusterBlockTests.java b/core/src/test/java/org/elasticsearch/cluster/block/ClusterBlockTests.java index 7f2d0828128..a7fe1b918c0 100644 --- a/core/src/test/java/org/elasticsearch/cluster/block/ClusterBlockTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/block/ClusterBlockTests.java @@ -55,7 +55,7 @@ public class ClusterBlockTests extends ESTestCase { out.setVersion(version); clusterBlock.writeTo(out); - StreamInput in = StreamInput.wrap(out.bytes()); + StreamInput in = out.bytes().streamInput(); in.setVersion(version); ClusterBlock result = ClusterBlock.readClusterBlock(in); diff --git a/core/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java b/core/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java index fd1e3e62466..0b0ea6b3f91 100644 --- a/core/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/health/ClusterStateHealthTests.java @@ -18,6 +18,8 @@ */ package org.elasticsearch.cluster.health; +import com.carrotsearch.hppc.cursors.IntObjectCursor; +import com.carrotsearch.hppc.cursors.ObjectCursor; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; @@ -32,12 +34,18 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.RoutingTableGenerator; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.collect.ImmutableOpenIntMap; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.gateway.NoopGatewayAllocator; import org.elasticsearch.test.transport.CapturingTransport; @@ -50,7 +58,10 @@ import org.junit.Before; import org.junit.BeforeClass; import java.io.IOException; +import java.util.ArrayList; import java.util.HashSet; +import java.util.List; +import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; @@ -65,14 +76,13 @@ import static org.hamcrest.Matchers.lessThanOrEqualTo; public class ClusterStateHealthTests extends ESTestCase { private final IndexNameExpressionResolver indexNameExpressionResolver = new IndexNameExpressionResolver(Settings.EMPTY); - private static ThreadPool threadPool; private ClusterService clusterService; private 
TransportService transportService; @BeforeClass - public static void beforeClass() { + public static void setupThreadPool() { threadPool = new TestThreadPool("ClusterStateHealthTests"); } @@ -94,7 +104,7 @@ public class ClusterStateHealthTests extends ESTestCase { } @AfterClass - public static void afterClass() { + public static void terminateThreadPool() { ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS); threadPool = null; } @@ -118,8 +128,8 @@ public class ClusterStateHealthTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { - logger.warn("unexpected failure", t); + public void onFailure(String source, Exception e) { + logger.warn("unexpected failure", e); } }); @@ -129,7 +139,6 @@ public class ClusterStateHealthTests extends ESTestCase { TransportClusterHealthAction action = new TransportClusterHealthAction(Settings.EMPTY, transportService, clusterService, threadPool, new ActionFilters(new HashSet<>()), indexNameExpressionResolver, NoopGatewayAllocator.INSTANCE); PlainActionFuture<ClusterHealthResponse> listener = new PlainActionFuture<>(); - action.execute(new ClusterHealthRequest(), listener); assertFalse(listener.isDone()); @@ -138,7 +147,6 @@ public class ClusterStateHealthTests extends ESTestCase { listener.get(); } - public void testClusterHealth() throws IOException { RoutingTableGenerator routingTableGenerator = new RoutingTableGenerator(); RoutingTableGenerator.ShardCounter counter = new RoutingTableGenerator.ShardCounter(); @@ -157,24 +165,376 @@ public class ClusterStateHealthTests extends ESTestCase { metaData.put(indexMetaData, true); routingTable.add(indexRoutingTable); } - ClusterState clusterState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)).metaData(metaData).routingTable(routingTable.build()).build(); - String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(clusterState, IndicesOptions.strictExpand(), (String[]) null); + ClusterState clusterState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)) + .metaData(metaData) + .routingTable(routingTable.build()) + .build(); + String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames( + clusterState, IndicesOptions.strictExpand(), (String[]) null + ); ClusterStateHealth clusterStateHealth = new ClusterStateHealth(clusterState, concreteIndices); logger.info("cluster status: {}, expected {}", clusterStateHealth.getStatus(), counter.status()); clusterStateHealth = maybeSerialize(clusterStateHealth); assertClusterHealth(clusterStateHealth, counter); } + public void testClusterHealthOnIndexCreation() { + final String indexName = "test-idx"; + final String[] indices = new String[] { indexName }; + final List<ClusterState> clusterStates = simulateIndexCreationStates(indexName, false); + for (int i = 0; i < clusterStates.size(); i++) { + // make sure cluster health is always YELLOW, up until the last state where it should be GREEN + final ClusterState clusterState = clusterStates.get(i); + final ClusterStateHealth health = new ClusterStateHealth(clusterState, indices); + if (i < clusterStates.size() - 1) { + assertThat(health.getStatus(), equalTo(ClusterHealthStatus.YELLOW)); + } else { + assertThat(health.getStatus(), equalTo(ClusterHealthStatus.GREEN)); + } + } + } + + public void testClusterHealthOnIndexCreationWithFailedAllocations() { + final String indexName = "test-idx"; + final String[] indices = new String[] { indexName }; + final List<ClusterState> clusterStates = simulateIndexCreationStates(indexName, true); + for (int i
= 0; i < clusterStates.size(); i++) { + // make sure cluster health is YELLOW up until the final cluster state, which contains primary shard + // failed allocations that should make the cluster health RED + final ClusterState clusterState = clusterStates.get(i); + final ClusterStateHealth health = new ClusterStateHealth(clusterState, indices); + if (i < clusterStates.size() - 1) { + assertThat(health.getStatus(), equalTo(ClusterHealthStatus.YELLOW)); + } else { + assertThat(health.getStatus(), equalTo(ClusterHealthStatus.RED)); + } + } + } + + public void testClusterHealthOnClusterRecovery() { + final String indexName = "test-idx"; + final String[] indices = new String[] { indexName }; + final List<ClusterState> clusterStates = simulateClusterRecoveryStates(indexName, false, false); + for (int i = 0; i < clusterStates.size(); i++) { + // make sure cluster health is YELLOW up until the final cluster state, when it turns GREEN + final ClusterState clusterState = clusterStates.get(i); + final ClusterStateHealth health = new ClusterStateHealth(clusterState, indices); + if (i < clusterStates.size() - 1) { + assertThat(health.getStatus(), equalTo(ClusterHealthStatus.YELLOW)); + } else { + assertThat(health.getStatus(), equalTo(ClusterHealthStatus.GREEN)); + } + } + } + + public void testClusterHealthOnClusterRecoveryWithFailures() { + final String indexName = "test-idx"; + final String[] indices = new String[] { indexName }; + final List<ClusterState> clusterStates = simulateClusterRecoveryStates(indexName, false, true); + for (int i = 0; i < clusterStates.size(); i++) { + // make sure cluster health is YELLOW up until the final cluster state, which contains primary shard + // failed allocations that should make the cluster health RED + final ClusterState clusterState = clusterStates.get(i); + final ClusterStateHealth health = new ClusterStateHealth(clusterState, indices); + if (i < clusterStates.size() - 1) { + assertThat(health.getStatus(), equalTo(ClusterHealthStatus.YELLOW)); + } else { + assertThat(health.getStatus(), equalTo(ClusterHealthStatus.RED)); + } + } + } + + public void testClusterHealthOnClusterRecoveryWithPreviousAllocationIds() { + final String indexName = "test-idx"; + final String[] indices = new String[] { indexName }; + final List<ClusterState> clusterStates = simulateClusterRecoveryStates(indexName, true, false); + for (int i = 0; i < clusterStates.size(); i++) { + // because there were previous allocation ids, we should be RED until the primaries are started, + // then move to YELLOW, and the last state should be GREEN when all shards have been started + final ClusterState clusterState = clusterStates.get(i); + final ClusterStateHealth health = new ClusterStateHealth(clusterState, indices); + if (i < clusterStates.size() - 1) { + // if the inactive primaries are due solely to recovery (not failed allocation or previously being allocated), + // then cluster health is YELLOW, otherwise RED + if (primaryInactiveDueToRecovery(indexName, clusterState)) { + assertThat(health.getStatus(), equalTo(ClusterHealthStatus.YELLOW)); + } else { + assertThat(health.getStatus(), equalTo(ClusterHealthStatus.RED)); + } + } else { + assertThat(health.getStatus(), equalTo(ClusterHealthStatus.GREEN)); + } + } + } + + public void testClusterHealthOnClusterRecoveryWithPreviousAllocationIdsAndAllocationFailures() { + final String indexName = "test-idx"; + final String[] indices = new String[] { indexName }; + for (final ClusterState clusterState : simulateClusterRecoveryStates(indexName, true, true)) { + final ClusterStateHealth health =
new ClusterStateHealth(clusterState, indices); + // if the inactive primaries are due solely to recovery (not failed allocation or previously being allocated) + // then cluster health is YELLOW, otherwise RED + if (primaryInactiveDueToRecovery(indexName, clusterState)) { + assertThat(health.getStatus(), equalTo(ClusterHealthStatus.YELLOW)); + } else { + assertThat(health.getStatus(), equalTo(ClusterHealthStatus.RED)); + } + } + } + ClusterStateHealth maybeSerialize(ClusterStateHealth clusterStateHealth) throws IOException { if (randomBoolean()) { BytesStreamOutput out = new BytesStreamOutput(); clusterStateHealth.writeTo(out); - StreamInput in = StreamInput.wrap(out.bytes()); + StreamInput in = out.bytes().streamInput(); clusterStateHealth = new ClusterStateHealth(in); } return clusterStateHealth; } + private List<ClusterState> simulateIndexCreationStates(final String indexName, final boolean withPrimaryAllocationFailures) { + final int numberOfShards = randomIntBetween(1, 5); + final int numberOfReplicas = randomIntBetween(1, numberOfShards); + // initial index creation and new routing table info + final IndexMetaData indexMetaData = IndexMetaData.builder(indexName) + .settings(settings(Version.CURRENT) + .put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID())) + .numberOfShards(numberOfShards) + .numberOfReplicas(numberOfReplicas) + .build(); + final MetaData metaData = MetaData.builder().put(indexMetaData, true).build(); + final RoutingTable routingTable = RoutingTable.builder().addAsNew(indexMetaData).build(); + + ClusterState clusterState = ClusterState.builder(new ClusterName("test_cluster")) + .metaData(metaData) + .routingTable(routingTable) + .build(); + return generateClusterStates(clusterState, indexName, numberOfReplicas, withPrimaryAllocationFailures); + } + + private List<ClusterState> simulateClusterRecoveryStates(final String indexName, + final boolean withPreviousAllocationIds, + final boolean withPrimaryAllocationFailures) { + final int numberOfShards = randomIntBetween(1, 5); + final int numberOfReplicas = randomIntBetween(1, numberOfShards); + // initial index creation and new routing table info + IndexMetaData indexMetaData = IndexMetaData.builder(indexName) + .settings(settings(Version.CURRENT) + .put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID())) + .numberOfShards(numberOfShards) + .numberOfReplicas(numberOfReplicas) + .state(IndexMetaData.State.OPEN) + .build(); + if (withPreviousAllocationIds) { + final IndexMetaData.Builder idxMetaWithAllocationIds = IndexMetaData.builder(indexMetaData); + boolean atLeastOne = false; + for (int i = 0; i < numberOfShards; i++) { + if (atLeastOne == false || randomBoolean()) { + idxMetaWithAllocationIds.putActiveAllocationIds(i, Sets.newHashSet(UUIDs.randomBase64UUID())); + atLeastOne = true; + } + } + indexMetaData = idxMetaWithAllocationIds.build(); + } + final MetaData metaData = MetaData.builder().put(indexMetaData, true).build(); + final RoutingTable routingTable = RoutingTable.builder().addAsRecovery(indexMetaData).build(); + + ClusterState clusterState = ClusterState.builder(new ClusterName("test_cluster")) + .metaData(metaData) + .routingTable(routingTable) + .build(); + return generateClusterStates(clusterState, indexName, numberOfReplicas, withPrimaryAllocationFailures); + } + + private List<ClusterState> generateClusterStates(final ClusterState originalClusterState, + final String indexName, + final int numberOfReplicas, + final boolean withPrimaryAllocationFailures) { + // generate random node ids + final List<String> nodeIds = new ArrayList<>();
+ final int numNodes = randomIntBetween(numberOfReplicas + 1, 10); + for (int i = 0; i < numNodes; i++) { + nodeIds.add(randomAsciiOfLength(8)); + } + + final List<ClusterState> clusterStates = new ArrayList<>(); + clusterStates.add(originalClusterState); + ClusterState clusterState = originalClusterState; + + // initialize primaries + RoutingTable routingTable = originalClusterState.routingTable(); + IndexRoutingTable indexRoutingTable = routingTable.index(indexName); + IndexRoutingTable.Builder newIndexRoutingTable = IndexRoutingTable.builder(indexRoutingTable.getIndex()); + for (final ObjectCursor<IndexShardRoutingTable> shardEntry : indexRoutingTable.getShards().values()) { + final IndexShardRoutingTable shardRoutingTable = shardEntry.value; + for (final ShardRouting shardRouting : shardRoutingTable.getShards()) { + if (shardRouting.primary()) { + newIndexRoutingTable.addShard( + shardRouting.initialize(nodeIds.get(randomIntBetween(0, numNodes - 1)), null, shardRouting.getExpectedShardSize()) + ); + } else { + newIndexRoutingTable.addShard(shardRouting); + } + } + } + routingTable = RoutingTable.builder(routingTable).add(newIndexRoutingTable).build(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + clusterStates.add(clusterState); + + // some primaries started + indexRoutingTable = routingTable.index(indexName); + newIndexRoutingTable = IndexRoutingTable.builder(indexRoutingTable.getIndex()); + ImmutableOpenIntMap.Builder<Set<String>> allocationIds = ImmutableOpenIntMap.<Set<String>>builder(); + for (final ObjectCursor<IndexShardRoutingTable> shardEntry : indexRoutingTable.getShards().values()) { + final IndexShardRoutingTable shardRoutingTable = shardEntry.value; + for (final ShardRouting shardRouting : shardRoutingTable.getShards()) { + if (shardRouting.primary() && randomBoolean()) { + final ShardRouting newShardRouting = shardRouting.moveToStarted(); + allocationIds.fPut(newShardRouting.getId(), Sets.newHashSet(newShardRouting.allocationId().getId())); + newIndexRoutingTable.addShard(newShardRouting); + } else { + newIndexRoutingTable.addShard(shardRouting); + } + } + } + routingTable = RoutingTable.builder(routingTable).add(newIndexRoutingTable).build(); + IndexMetaData.Builder idxMetaBuilder = IndexMetaData.builder(clusterState.metaData().index(indexName)); + for (final IntObjectCursor<Set<String>> entry : allocationIds.build()) { + idxMetaBuilder.putActiveAllocationIds(entry.key, entry.value); + } + MetaData.Builder metaDataBuilder = MetaData.builder(clusterState.metaData()).put(idxMetaBuilder); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).metaData(metaDataBuilder).build(); + clusterStates.add(clusterState); + + if (withPrimaryAllocationFailures) { + boolean alreadyFailedPrimary = false; + // some primaries failed to allocate + indexRoutingTable = routingTable.index(indexName); + newIndexRoutingTable = IndexRoutingTable.builder(indexRoutingTable.getIndex()); + for (final ObjectCursor<IndexShardRoutingTable> shardEntry : indexRoutingTable.getShards().values()) { + final IndexShardRoutingTable shardRoutingTable = shardEntry.value; + for (final ShardRouting shardRouting : shardRoutingTable.getShards()) { + if (shardRouting.primary() && (shardRouting.started() == false || alreadyFailedPrimary == false)) { + newIndexRoutingTable.addShard(shardRouting.moveToUnassigned( + new UnassignedInfo(UnassignedInfo.Reason.ALLOCATION_FAILED, "unlucky shard"))); + alreadyFailedPrimary = true; + } else { + newIndexRoutingTable.addShard(shardRouting); + } + } + } + routingTable =
RoutingTable.builder(routingTable).add(newIndexRoutingTable).build(); + clusterStates.add(ClusterState.builder(clusterState).routingTable(routingTable).build()); + return clusterStates; + } + + // all primaries started + indexRoutingTable = routingTable.index(indexName); + newIndexRoutingTable = IndexRoutingTable.builder(indexRoutingTable.getIndex()); + allocationIds = ImmutableOpenIntMap.<Set<String>>builder(); + for (final ObjectCursor<IndexShardRoutingTable> shardEntry : indexRoutingTable.getShards().values()) { + final IndexShardRoutingTable shardRoutingTable = shardEntry.value; + for (final ShardRouting shardRouting : shardRoutingTable.getShards()) { + if (shardRouting.primary() && shardRouting.started() == false) { + final ShardRouting newShardRouting = shardRouting.moveToStarted(); + allocationIds.fPut(newShardRouting.getId(), Sets.newHashSet(newShardRouting.allocationId().getId())); + newIndexRoutingTable.addShard(newShardRouting); + } else { + newIndexRoutingTable.addShard(shardRouting); + } + } + } + routingTable = RoutingTable.builder(routingTable).add(newIndexRoutingTable).build(); + idxMetaBuilder = IndexMetaData.builder(clusterState.metaData().index(indexName)); + for (final IntObjectCursor<Set<String>> entry : allocationIds.build()) { + idxMetaBuilder.putActiveAllocationIds(entry.key, entry.value); + } + metaDataBuilder = MetaData.builder(clusterState.metaData()).put(idxMetaBuilder); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).metaData(metaDataBuilder).build(); + clusterStates.add(clusterState); + + // initialize replicas + indexRoutingTable = routingTable.index(indexName); + newIndexRoutingTable = IndexRoutingTable.builder(indexRoutingTable.getIndex()); + for (final ObjectCursor<IndexShardRoutingTable> shardEntry : indexRoutingTable.getShards().values()) { + final IndexShardRoutingTable shardRoutingTable = shardEntry.value; + for (final ShardRouting shardRouting : shardRoutingTable.getShards()) { + if (shardRouting.primary() == false) { + // give the replica a different node id than the primary + final String primaryNodeId = shardRoutingTable.primaryShard().currentNodeId(); + String replicaNodeId; + do { + replicaNodeId = nodeIds.get(randomIntBetween(0, numNodes - 1)); + } while (primaryNodeId.equals(replicaNodeId)); + newIndexRoutingTable.addShard( + shardRouting.initialize(replicaNodeId, null, shardRouting.getExpectedShardSize()) + ); + } else { + newIndexRoutingTable.addShard(shardRouting); + } + } + } + routingTable = RoutingTable.builder(routingTable).add(newIndexRoutingTable).build(); + clusterStates.add(ClusterState.builder(clusterState).routingTable(routingTable).build()); + + // some replicas started + indexRoutingTable = routingTable.index(indexName); + newIndexRoutingTable = IndexRoutingTable.builder(indexRoutingTable.getIndex()); + for (final ObjectCursor<IndexShardRoutingTable> shardEntry : indexRoutingTable.getShards().values()) { + final IndexShardRoutingTable shardRoutingTable = shardEntry.value; + for (final ShardRouting shardRouting : shardRoutingTable.getShards()) { + if (shardRouting.primary() == false && randomBoolean()) { + newIndexRoutingTable.addShard(shardRouting.moveToStarted()); + } else { + newIndexRoutingTable.addShard(shardRouting); + } + } + } + routingTable = RoutingTable.builder(routingTable).add(newIndexRoutingTable).build(); + clusterStates.add(ClusterState.builder(clusterState).routingTable(routingTable).build()); + + // all replicas started + boolean replicaStateChanged = false; + indexRoutingTable = routingTable.index(indexName); + newIndexRoutingTable =
+ private void assertClusterHealth(ClusterStateHealth clusterStateHealth, RoutingTableGenerator.ShardCounter counter) { assertThat(clusterStateHealth.getStatus(), equalTo(counter.status())); assertThat(clusterStateHealth.getActiveShards(), equalTo(counter.active));
diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/IndexGraveyardTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/IndexGraveyardTests.java index aec701052fb..8dd950ba8e6 100644 --- a/core/src/test/java/org/elasticsearch/cluster/metadata/IndexGraveyardTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/IndexGraveyardTests.java @@ -60,8 +60,7 @@ public class IndexGraveyardTests extends ESTestCase { final IndexGraveyard graveyard = createRandom(); final BytesStreamOutput out = new BytesStreamOutput(); graveyard.writeTo(out); - final ByteBufferStreamInput in = new ByteBufferStreamInput(ByteBuffer.wrap(out.bytes().toBytes())); - assertThat(IndexGraveyard.fromStream(in), equalTo(graveyard)); + assertThat(IndexGraveyard.fromStream(out.bytes().streamInput()), equalTo(graveyard)); } public void testXContent() throws IOException {
diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/IndexMetaDataTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/IndexMetaDataTests.java index 0c9827587ea..5fef33be388 100644 --- a/core/src/test/java/org/elasticsearch/cluster/metadata/IndexMetaDataTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/IndexMetaDataTests.java @@ -69,7 +69,7 @@ public class IndexMetaDataTests extends ESTestCase { final BytesStreamOutput out = new BytesStreamOutput(); metaData.writeTo(out); - IndexMetaData deserialized = IndexMetaData.PROTO.readFrom(StreamInput.wrap(out.bytes())); + IndexMetaData deserialized = IndexMetaData.PROTO.readFrom(out.bytes().streamInput()); assertEquals(metaData, deserialized);
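The two hunks above follow the serialization round-trip idiom this patch standardizes on: write to a BytesStreamOutput, then read back through bytes().streamInput() rather than the removed StreamInput.wrap(...). As a generic sketch (writeable and read(...) are placeholder names for any Writeable and its matching deserializer):

    BytesStreamOutput out = new BytesStreamOutput();
    writeable.writeTo(out);
    StreamInput in = out.bytes().streamInput(); // replaces StreamInput.wrap(out.bytes())
    assertEquals(writeable, read(in));          // e.g. PROTO.readFrom(in) in these tests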
assertEquals(metaData.hashCode(), deserialized.hashCode()); diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java index 04d27020273..f6f7aaf3228 100644 --- a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexServiceTests.java @@ -34,7 +34,7 @@ import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.indices.IndexAlreadyExistsException; import org.elasticsearch.indices.InvalidIndexNameException; @@ -181,7 +181,7 @@ public class MetaDataCreateIndexServiceTests extends ESTestCase { } private DiscoveryNode newNode(String nodeId) { - return new DiscoveryNode(nodeId, DummyTransportAddress.INSTANCE, emptyMap(), + return new DiscoveryNode(nodeId, LocalTransportAddress.buildUnique(), emptyMap(), Collections.unmodifiableSet(new HashSet<>(Arrays.asList(DiscoveryNode.Role.MASTER, DiscoveryNode.Role.DATA))), Version.CURRENT); } @@ -216,6 +216,7 @@ public class MetaDataCreateIndexServiceTests extends ESTestCase { new HashSet<>(), null, null, + null, null); } } diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java index e1e3a39122c..cf040fb3c7f 100644 --- a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.cluster.metadata; import org.elasticsearch.Version; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.ByteBufferStreamInput; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; @@ -34,7 +33,6 @@ import org.elasticsearch.index.Index; import org.elasticsearch.test.ESTestCase; import java.io.IOException; -import java.nio.ByteBuffer; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -185,8 +183,7 @@ public class MetaDataTests extends ESTestCase { final MetaData originalMeta = MetaData.builder().indexGraveyard(graveyard).build(); final BytesStreamOutput out = new BytesStreamOutput(); originalMeta.writeTo(out); - final ByteBufferStreamInput in = new ByteBufferStreamInput(ByteBuffer.wrap(out.bytes().toBytes())); - final MetaData fromStreamMeta = MetaData.PROTO.readFrom(in); + final MetaData fromStreamMeta = MetaData.PROTO.readFrom(out.bytes().streamInput()); assertThat(fromStreamMeta.indexGraveyard(), equalTo(fromStreamMeta.indexGraveyard())); } } diff --git a/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeFiltersTests.java b/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeFiltersTests.java index 38aa73a9935..59f058a95fb 100644 --- a/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeFiltersTests.java +++ 
b/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeFiltersTests.java @@ -21,8 +21,8 @@ package org.elasticsearch.cluster.node; import org.elasticsearch.Version; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; import org.elasticsearch.common.transport.InetSocketTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.test.ESTestCase; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -64,10 +64,11 @@ public class DiscoveryNodeFiltersTests extends ESTestCase { .build(); DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(OR, "xxx.", settings); - DiscoveryNode node = new DiscoveryNode("name1", "id1", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + DiscoveryNode node = new DiscoveryNode("name1", "id1", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), + Version.CURRENT); assertThat(filters.match(node), equalTo(true)); - node = new DiscoveryNode("name2", "id2", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + node = new DiscoveryNode("name2", "id2", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); assertThat(filters.match(node), equalTo(false)); } @@ -77,10 +78,11 @@ public class DiscoveryNodeFiltersTests extends ESTestCase { .build(); DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(OR, "xxx.", settings); - DiscoveryNode node = new DiscoveryNode("name1", "id1", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + DiscoveryNode node = new DiscoveryNode("name1", "id1", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), + Version.CURRENT); assertThat(filters.match(node), equalTo(true)); - node = new DiscoveryNode("name2", "id2", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + node = new DiscoveryNode("name2", "id2", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); assertThat(filters.match(node), equalTo(false)); } @@ -91,13 +93,14 @@ public class DiscoveryNodeFiltersTests extends ESTestCase { .build()); DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(OR, "xxx.", settings); - DiscoveryNode node = new DiscoveryNode("name1", "id1", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + final Version version = Version.CURRENT; + DiscoveryNode node = new DiscoveryNode("name1", "id1", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), version); assertThat(filters.match(node), equalTo(true)); - node = new DiscoveryNode("name2", "id2", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + node = new DiscoveryNode("name2", "id2", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), version); assertThat(filters.match(node), equalTo(true)); - node = new DiscoveryNode("name3", "id3", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + node = new DiscoveryNode("name3", "id3", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), version); assertThat(filters.match(node), equalTo(false)); } @@ -111,7 +114,7 @@ public class DiscoveryNodeFiltersTests extends ESTestCase { Map attributes = new HashMap<>(); attributes.put("tag", "A"); attributes.put("group", "B"); - DiscoveryNode node = new DiscoveryNode("name1", "id1", DummyTransportAddress.INSTANCE, + DiscoveryNode node = new DiscoveryNode("name1", "id1", 
LocalTransportAddress.buildUnique(), attributes, emptySet(), Version.CURRENT); assertThat(filters.match(node), equalTo(true)); @@ -119,7 +122,7 @@ public class DiscoveryNodeFiltersTests extends ESTestCase { attributes.put("tag", "A"); attributes.put("group", "B"); attributes.put("name", "X"); - node = new DiscoveryNode("name2", "id2", DummyTransportAddress.INSTANCE, + node = new DiscoveryNode("name2", "id2", LocalTransportAddress.buildUnique(), attributes, emptySet(), Version.CURRENT); assertThat(filters.match(node), equalTo(true)); @@ -127,11 +130,11 @@ public class DiscoveryNodeFiltersTests extends ESTestCase { attributes.put("tag", "A"); attributes.put("group", "F"); attributes.put("name", "X"); - node = new DiscoveryNode("name3", "id3", DummyTransportAddress.INSTANCE, + node = new DiscoveryNode("name3", "id3", LocalTransportAddress.buildUnique(), attributes, emptySet(), Version.CURRENT); assertThat(filters.match(node), equalTo(false)); - node = new DiscoveryNode("name4", "id4", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + node = new DiscoveryNode("name4", "id4", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); assertThat(filters.match(node), equalTo(false)); } @@ -141,7 +144,8 @@ public class DiscoveryNodeFiltersTests extends ESTestCase { .build(); DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(OR, "xxx.", settings); - DiscoveryNode node = new DiscoveryNode("name1", "id1", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + DiscoveryNode node = new DiscoveryNode("name1", "id1", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), + Version.CURRENT); assertThat(filters.match(node), equalTo(true)); } @@ -152,7 +156,7 @@ public class DiscoveryNodeFiltersTests extends ESTestCase { .build()); DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(AND, "xxx.", settings); - DiscoveryNode node = new DiscoveryNode("", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); + DiscoveryNode node = new DiscoveryNode("", "", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); assertThat(filters.match(node), equalTo(true)); } @@ -163,7 +167,7 @@ public class DiscoveryNodeFiltersTests extends ESTestCase { .build()); DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(AND, "xxx.", settings); - DiscoveryNode node = new DiscoveryNode("", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); + DiscoveryNode node = new DiscoveryNode("", "", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); assertThat(filters.match(node), equalTo(false)); } @@ -174,7 +178,7 @@ public class DiscoveryNodeFiltersTests extends ESTestCase { .build()); DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(AND, "xxx.", settings); - DiscoveryNode node = new DiscoveryNode("", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); + DiscoveryNode node = new DiscoveryNode("", "", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); assertThat(filters.match(node), equalTo(false)); } @@ -185,7 +189,7 @@ public class DiscoveryNodeFiltersTests extends ESTestCase { .build()); DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(OR, "xxx.", settings); - DiscoveryNode node = new DiscoveryNode("", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); + DiscoveryNode node = 
new DiscoveryNode("", "", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); assertThat(filters.match(node), equalTo(true)); } @@ -196,7 +200,7 @@ public class DiscoveryNodeFiltersTests extends ESTestCase { .build()); DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(OR, "xxx.", settings); - DiscoveryNode node = new DiscoveryNode("", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); + DiscoveryNode node = new DiscoveryNode("", "", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); assertThat(filters.match(node), equalTo(true)); } @@ -207,7 +211,7 @@ public class DiscoveryNodeFiltersTests extends ESTestCase { .build()); DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(AND, "xxx.", settings); - DiscoveryNode node = new DiscoveryNode("", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); + DiscoveryNode node = new DiscoveryNode("", "", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); assertThat(filters.match(node), equalTo(true)); } @@ -218,7 +222,7 @@ public class DiscoveryNodeFiltersTests extends ESTestCase { .build()); DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(AND, "xxx.", settings); - DiscoveryNode node = new DiscoveryNode("", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); + DiscoveryNode node = new DiscoveryNode("", "", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); assertThat(filters.match(node), equalTo(false)); } @@ -229,7 +233,7 @@ public class DiscoveryNodeFiltersTests extends ESTestCase { .build()); DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(OR, "xxx.", settings); - DiscoveryNode node = new DiscoveryNode("", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); + DiscoveryNode node = new DiscoveryNode("", "", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); assertThat(filters.match(node), equalTo(true)); } @@ -240,7 +244,7 @@ public class DiscoveryNodeFiltersTests extends ESTestCase { .build()); DiscoveryNodeFilters filters = DiscoveryNodeFilters.buildFromSettings(OR, "xxx.", settings); - DiscoveryNode node = new DiscoveryNode("", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); + DiscoveryNode node = new DiscoveryNode("", "", "", "", "192.1.1.54", localAddress, singletonMap("tag", "A"), emptySet(), null); assertThat(filters.match(node), equalTo(true)); } diff --git a/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeServiceTests.java deleted file mode 100644 index fb38a428a76..00000000000 --- a/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeServiceTests.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.cluster.node; - -import org.elasticsearch.Version; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; -import org.elasticsearch.test.ESTestCase; - -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import static org.hamcrest.CoreMatchers.equalTo; - -public class DiscoveryNodeServiceTests extends ESTestCase { - - public void testBuildLocalNode() { - Map<String, String> expectedAttributes = new HashMap<>(); - int numCustomSettings = randomIntBetween(0, 5); - Settings.Builder builder = Settings.builder(); - for (int i = 0; i < numCustomSettings; i++) { - builder.put("node.attr.attr" + i, "value" + i); - expectedAttributes.put("attr" + i, "value" + i); - } - Set<DiscoveryNode.Role> selectedRoles = new HashSet<>(); - for (DiscoveryNode.Role role : DiscoveryNode.Role.values()) { - if (randomBoolean()) { - //test default true for every role - selectedRoles.add(role); - } else { - boolean isRoleEnabled = randomBoolean(); - builder.put("node." + role.getRoleName(), isRoleEnabled); - if (isRoleEnabled) { - selectedRoles.add(role); - } - } - } - DiscoveryNodeService discoveryNodeService = new DiscoveryNodeService(builder.build()); - DiscoveryNode discoveryNode = discoveryNodeService.buildLocalNode(DummyTransportAddress.INSTANCE); - assertThat(discoveryNode.getRoles(), equalTo(selectedRoles)); - assertThat(discoveryNode.getAttributes(), equalTo(expectedAttributes)); - } - - public void testBuildAttributesWithCustomAttributeServiceProvider() { - Map<String, String> expectedAttributes = new HashMap<>(); - int numCustomSettings = randomIntBetween(0, 5); - Settings.Builder builder = Settings.builder(); - for (int i = 0; i < numCustomSettings; i++) { - builder.put("node.attr.attr" + i, "value" + i); - expectedAttributes.put("attr" + i, "value" + i); - } - DiscoveryNodeService discoveryNodeService = new DiscoveryNodeService(builder.build()); - int numCustomAttributes = randomIntBetween(0, 5); - Map<String, String> customAttributes = new HashMap<>(); - for (int i = 0; i < numCustomAttributes; i++) { - customAttributes.put("custom-" + randomAsciiOfLengthBetween(5, 10), randomAsciiOfLengthBetween(1, 10)); - } - expectedAttributes.putAll(customAttributes); - discoveryNodeService.addCustomAttributeProvider(() -> customAttributes); - - DiscoveryNode discoveryNode = discoveryNodeService.buildLocalNode(DummyTransportAddress.INSTANCE); - assertThat(discoveryNode.getAttributes(), equalTo(expectedAttributes)); - } -}
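A substitution that recurs through the rest of this patch: test nodes are now built with LocalTransportAddress.buildUnique() instead of the shared DummyTransportAddress.INSTANCE, giving every DiscoveryNode a distinct transport address. Sketch of the new construction, with the argument order taken from the surrounding hunks:

    DiscoveryNode node = new DiscoveryNode("name1", "id1", LocalTransportAddress.buildUnique(),
            emptyMap(), emptySet(), Version.CURRENT); // unique address per node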
diff --git a/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodesTests.java b/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodesTests.java index 984cd31b7a0..ec741a908c5 100644 --- a/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodesTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodesTests.java @@ -19,8 +19,9 @@ package org.elasticsearch.cluster.node; +import com.carrotsearch.randomizedtesting.generators.RandomPicks; import org.elasticsearch.Version; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; @@ -30,10 +31,15 @@ import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Collectors; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.nullValue; public class DiscoveryNodesTests extends ESTestCase { @@ -53,7 +59,7 @@ public class DiscoveryNodesTests extends ESTestCase { DiscoveryNode resolvedNode = discoveryNodes.resolveNode(nodeSelector.selector); assertThat(matchingNodeIds.size(), equalTo(1)); assertThat(resolvedNode.getId(), equalTo(matchingNodeIds.iterator().next())); - } catch(IllegalArgumentException e) { + } catch (IllegalArgumentException e) { if (matchingNodeIds.size() == 0) { assertThat(e.getMessage(), equalTo("failed to resolve [" + nodeSelector.selector + "], no matching nodes")); } else if (matchingNodeIds.size() > 1) { @@ -98,26 +104,98 @@ assertThat(resolvedNodesIds, equalTo(expectedNodesIds)); } - private static DiscoveryNodes buildDiscoveryNodes() { - int numNodes = randomIntBetween(1, 10); - DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder(); + public void testDeltas() { + Set<DiscoveryNode> nodesA = new HashSet<>(); + nodesA.addAll(randomNodes(1 + randomInt(10))); + Set<DiscoveryNode> nodesB = new HashSet<>(); + nodesB.addAll(randomNodes(1 + randomInt(5))); + for (DiscoveryNode node : randomSubsetOf(nodesA)) { + if (randomBoolean()) { + // change an attribute + Map<String, String> attrs = new HashMap<>(node.getAttributes()); + attrs.put("new", "new"); + node = new DiscoveryNode(node.getName(), node.getId(), node.getAddress(), attrs, node.getRoles(), node.getVersion()); + } + nodesB.add(node); + } + + DiscoveryNode masterA = randomBoolean() ? null : RandomPicks.randomFrom(random(), nodesA); + DiscoveryNode masterB = randomBoolean() ? null : RandomPicks.randomFrom(random(), nodesB); + + DiscoveryNodes.Builder builderA = DiscoveryNodes.builder(); + nodesA.stream().forEach(builderA::put); + final String masterAId = masterA == null ? null : masterA.getId(); + builderA.masterNodeId(masterAId); + builderA.localNodeId(RandomPicks.randomFrom(random(), nodesA).getId()); + + DiscoveryNodes.Builder builderB = DiscoveryNodes.builder(); + nodesB.stream().forEach(builderB::put); + final String masterBId = masterB == null ? null : masterB.getId(); + builderB.masterNodeId(masterBId); + builderB.localNodeId(RandomPicks.randomFrom(random(), nodesB).getId()); + + final DiscoveryNodes discoNodesA = builderA.build(); + final DiscoveryNodes discoNodesB = builderB.build(); + logger.info("nodes A: {}", discoNodesA.prettyPrint()); + logger.info("nodes B: {}", discoNodesB.prettyPrint()); + + DiscoveryNodes.Delta delta = discoNodesB.delta(discoNodesA); + + if (masterB == null || Objects.equals(masterAId, masterBId)) { + assertFalse(delta.masterNodeChanged()); + assertThat(delta.previousMasterNode(), nullValue()); + assertThat(delta.newMasterNode(), nullValue()); + } else { + assertTrue(delta.masterNodeChanged()); + assertThat(delta.newMasterNode().getId(), equalTo(masterBId)); + assertThat(delta.previousMasterNode() != null ?
delta.previousMasterNode().getId() : null, + equalTo(masterAId)); + } + + Set<DiscoveryNode> newNodes = new HashSet<>(nodesB); + newNodes.removeAll(nodesA); + assertThat(delta.added(), equalTo(newNodes.isEmpty() == false)); + assertThat(delta.addedNodes(), containsInAnyOrder(newNodes.stream().collect(Collectors.toList()).toArray())); + assertThat(delta.addedNodes().size(), equalTo(newNodes.size())); + + Set<DiscoveryNode> removedNodes = new HashSet<>(nodesA); + removedNodes.removeAll(nodesB); + assertThat(delta.removed(), equalTo(removedNodes.isEmpty() == false)); + assertThat(delta.removedNodes(), containsInAnyOrder(removedNodes.stream().collect(Collectors.toList()).toArray())); + assertThat(delta.removedNodes().size(), equalTo(removedNodes.size())); + } + + private static AtomicInteger idGenerator = new AtomicInteger(); + + private static List<DiscoveryNode> randomNodes(final int numNodes) { List<DiscoveryNode> nodesList = new ArrayList<>(); for (int i = 0; i < numNodes; i++) { Map<String, String> attributes = new HashMap<>(); if (frequently()) { attributes.put("custom", randomBoolean() ? "match" : randomAsciiOfLengthBetween(3, 5)); } - final DiscoveryNode node = newNode(i, attributes, new HashSet<>(randomSubsetOf(Arrays.asList(DiscoveryNode.Role.values())))); - discoBuilder = discoBuilder.put(node); + final DiscoveryNode node = newNode(idGenerator.getAndIncrement(), attributes, + new HashSet<>(randomSubsetOf(Arrays.asList(DiscoveryNode.Role.values())))); nodesList.add(node); } + return nodesList; + } + + private static DiscoveryNodes buildDiscoveryNodes() { + int numNodes = randomIntBetween(1, 10); + DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder(); + List<DiscoveryNode> nodesList = randomNodes(numNodes); + for (DiscoveryNode node : nodesList) { + discoBuilder = discoBuilder.put(node); + } discoBuilder.localNodeId(randomFrom(nodesList).getId()); discoBuilder.masterNodeId(randomFrom(nodesList).getId()); return discoBuilder.build(); } private static DiscoveryNode newNode(int nodeId, Map<String, String> attributes, Set<DiscoveryNode.Role> roles) { - return new DiscoveryNode("name_" + nodeId, "node_" + nodeId, DummyTransportAddress.INSTANCE, attributes, roles, Version.CURRENT); + return new DiscoveryNode("name_" + nodeId, "node_" + nodeId, LocalTransportAddress.buildUnique(), attributes, roles, + Version.CURRENT); } private enum NodeSelector { @@ -152,7 +230,7 @@ public class DiscoveryNodesTests extends ESTestCase { nodes.getIngestNodes().keysIt().forEachRemaining(ids::add); return ids; } - },CUSTOM_ATTRIBUTE("attr:value") { + }, CUSTOM_ATTRIBUTE("attr:value") { @Override Set<String> matchingNodeIds(DiscoveryNodes nodes) { Set<String> ids = new HashSet<>();
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java b/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java index f990f382c8c..f267af66dc6 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java @@ -22,6 +22,7 @@ package org.elasticsearch.cluster.routing; import com.carrotsearch.hppc.cursors.IntObjectCursor; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteRequestBuilder; import org.elasticsearch.action.admin.indices.shards.IndicesShardStoresResponse; +import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.allocation.command.AllocateEmptyPrimaryAllocationCommand; import org.elasticsearch.cluster.routing.allocation.command.AllocateStalePrimaryAllocationCommand; @@ -108,7 +109,7
@@ public class PrimaryAllocationIT extends ESIntegTestCase { logger.info("--> check that old primary shard does not get promoted to primary again"); // kick reroute and wait for all shard states to be fetched client(master).admin().cluster().prepareReroute().get(); - assertBusy(new Runnable() { + assertBusy(new Runnable() { @Override public void run() { assertThat(internalCluster().getInstance(GatewayAllocator.class, master).getNumberOfInFlightFetch(), equalTo(0)); @@ -157,7 +158,8 @@ public class PrimaryAllocationIT extends ESIntegTestCase { createStaleReplicaScenario(); logger.info("--> explicitly promote old primary shard"); - ImmutableOpenIntMap<List<IndicesShardStoresResponse.StoreStatus>> storeStatuses = client().admin().indices().prepareShardStores("test").get().getStoreStatuses().get("test"); + final String idxName = "test"; + ImmutableOpenIntMap<List<IndicesShardStoresResponse.StoreStatus>> storeStatuses = client().admin().indices().prepareShardStores(idxName).get().getStoreStatuses().get(idxName); ClusterRerouteRequestBuilder rerouteBuilder = client().admin().cluster().prepareReroute(); for (IntObjectCursor<List<IndicesShardStoresResponse.StoreStatus>> shardStoreStatuses : storeStatuses) { int shardId = shardStoreStatuses.key; @@ -165,22 +167,30 @@ public class PrimaryAllocationIT extends ESIntegTestCase { logger.info("--> adding allocation command for shard {}", shardId); // force allocation based on node id if (useStaleReplica) { - rerouteBuilder.add(new AllocateStalePrimaryAllocationCommand("test", shardId, storeStatus.getNode().getId(), true)); + rerouteBuilder.add(new AllocateStalePrimaryAllocationCommand(idxName, shardId, storeStatus.getNode().getId(), true)); } else { - rerouteBuilder.add(new AllocateEmptyPrimaryAllocationCommand("test", shardId, storeStatus.getNode().getId(), true)); + rerouteBuilder.add(new AllocateEmptyPrimaryAllocationCommand(idxName, shardId, storeStatus.getNode().getId(), true)); } } rerouteBuilder.get(); logger.info("--> check that the stale primary shard gets allocated and that documents are available"); - ensureYellow("test"); + ensureYellow(idxName); - assertHitCount(client().prepareSearch("test").setSize(0).setQuery(matchAllQuery()).get(), useStaleReplica ? 1L : 0L); + if (useStaleReplica == false) { + // When invoking AllocateEmptyPrimaryAllocationCommand, due to the UnassignedInfo.Reason being changed to INDEX_CREATION, + // it's possible that the shard has not completed initialization, even though the cluster health is yellow, so the + // search can throw an "all shards failed" exception. We will wait until the shard initialization has completed before + // verifying the search hit count. + assertBusy(() -> assertTrue(clusterService().state().routingTable().index(idxName).allPrimaryShardsActive())); + + } + assertHitCount(client().prepareSearch(idxName).setSize(0).setQuery(matchAllQuery()).get(), useStaleReplica ?
1L : 0L); } public void testForcePrimaryShardIfAllocationDecidersSayNoAfterIndexCreation() throws ExecutionException, InterruptedException { String node = internalCluster().startNode(); - client().admin().indices().prepareCreate("test").setSettings(Settings.builder() + client().admin().indices().prepareCreate("test").setWaitForActiveShards(ActiveShardCount.NONE).setSettings(Settings.builder() .put("index.routing.allocation.exclude._name", node) .put("index.number_of_shards", 1).put("index.number_of_replicas", 0)).get(); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryTermsTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryTermsTests.java index 2d3e44db68a..32072282d6f 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryTermsTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/PrimaryTermsTests.java @@ -50,7 +50,7 @@ public class PrimaryTermsTests extends ESAllocationTestCase { private RoutingTable testRoutingTable; private int numberOfShards; private int numberOfReplicas; - private final static Settings DEFAULT_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); + private static final Settings DEFAULT_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); private AllocationService allocationService; private ClusterState clusterState; diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableGenerator.java b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableGenerator.java index cea89b09542..62002ad1f99 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableGenerator.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableGenerator.java @@ -24,6 +24,8 @@ import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.index.shard.ShardId; +import static org.elasticsearch.cluster.health.ClusterShardHealth.getInactivePrimaryHealth; + public class RoutingTableGenerator { private static int node_id = 1; @@ -56,14 +58,15 @@ public class RoutingTableGenerator { } - public IndexShardRoutingTable genShardRoutingTable(String index, int shardId, int replicas, ShardCounter counter) { + public IndexShardRoutingTable genShardRoutingTable(IndexMetaData indexMetaData, int shardId, ShardCounter counter) { + final String index = indexMetaData.getIndex().getName(); IndexShardRoutingTable.Builder builder = new IndexShardRoutingTable.Builder(new ShardId(index, "_na_", shardId)); ShardRouting shardRouting = genShardRouting(index, shardId, true); - counter.update(shardRouting); + counter.update(shardRouting, indexMetaData); builder.addShard(shardRouting); - for (; replicas > 0; replicas--) { + for (int replicas = indexMetaData.getNumberOfReplicas(); replicas > 0; replicas--) { shardRouting = genShardRouting(index, shardId, false); - counter.update(shardRouting); + counter.update(shardRouting, indexMetaData); builder.addShard(shardRouting); } @@ -73,8 +76,7 @@ public class RoutingTableGenerator { public IndexRoutingTable genIndexRoutingTable(IndexMetaData indexMetaData, ShardCounter counter) { IndexRoutingTable.Builder builder = IndexRoutingTable.builder(indexMetaData.getIndex()); for (int shard = 0; shard < indexMetaData.getNumberOfShards(); shard++) { - builder.addIndexShard(genShardRoutingTable(indexMetaData.getIndex().getName(), shard, - indexMetaData.getNumberOfReplicas(), counter)); 
+ builder.addIndexShard(genShardRoutingTable(indexMetaData, shard, counter)); } return builder.build(); } @@ -86,10 +88,15 @@ public class RoutingTableGenerator { public int unassigned; public int primaryActive; public int primaryInactive; + private boolean inactivePrimaryCausesRed = false; public ClusterHealthStatus status() { if (primaryInactive > 0) { - return ClusterHealthStatus.RED; + if (inactivePrimaryCausesRed) { + return ClusterHealthStatus.RED; + } else { + return ClusterHealthStatus.YELLOW; + } } if (unassigned > 0 || initializing > 0) { return ClusterHealthStatus.YELLOW; @@ -97,7 +104,7 @@ public class RoutingTableGenerator { return ClusterHealthStatus.GREEN; } - public void update(ShardRouting shardRouting) { + public void update(ShardRouting shardRouting, IndexMetaData indexMetaData) { if (shardRouting.active()) { active++; if (shardRouting.primary()) { @@ -111,6 +118,9 @@ public class RoutingTableGenerator { if (shardRouting.primary()) { primaryInactive++; + if (inactivePrimaryCausesRed == false) { + inactivePrimaryCausesRed = getInactivePrimaryHealth(shardRouting, indexMetaData) == ClusterHealthStatus.RED; + } } if (shardRouting.initializing()) { initializing++; diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java index 2d1a467a001..9da5e76ed1f 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/RoutingTableTests.java @@ -47,7 +47,7 @@ public class RoutingTableTests extends ESAllocationTestCase { private int numberOfReplicas; private int shardsPerIndex; private int totalNumberOfShards; - private final static Settings DEFAULT_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); + private static final Settings DEFAULT_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); private final AllocationService ALLOCATION_SERVICE = createAllocationService(Settings.builder() .put("cluster.routing.allocation.node_concurrent_recoveries", Integer.MAX_VALUE) // don't limit recoveries .put("cluster.routing.allocation.node_initial_primaries_recoveries", Integer.MAX_VALUE) diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java index 7267252b19f..fa9133f6d36 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/ShardRoutingTests.java @@ -206,7 +206,7 @@ public class ShardRoutingTests extends ESTestCase { if (randomBoolean()) { BytesStreamOutput out = new BytesStreamOutput(); routing.writeTo(out); - routing = new ShardRouting(StreamInput.wrap(out.bytes())); + routing = new ShardRouting(out.bytes().streamInput()); } if (routing.initializing() || routing.relocating()) { assertEquals(routing.toString(), byteSize, routing.getExpectedShardSize()); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java index 75300a4beb8..0854d27e208 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java @@ -26,19 +26,22 @@ import org.elasticsearch.cluster.ClusterName; import 
org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.routing.UnassignedInfo.AllocationStatus; import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.io.stream.ByteBufferStreamInput; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.FailedRerouteAllocation; import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.Index; import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.test.ESAllocationTestCase; +import java.io.IOException; +import java.nio.ByteBuffer; import java.util.Collections; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; @@ -76,13 +79,14 @@ public class UnassignedInfoTests extends ESAllocationTestCase { public void testSerialization() throws Exception { UnassignedInfo.Reason reason = RandomPicks.randomFrom(random(), UnassignedInfo.Reason.values()); UnassignedInfo meta = reason == UnassignedInfo.Reason.ALLOCATION_FAILED ? - new UnassignedInfo(reason, randomBoolean() ? randomAsciiOfLength(4) : null, null, randomIntBetween(1, 100), System.nanoTime(), System.currentTimeMillis(), false): + new UnassignedInfo(reason, randomBoolean() ? randomAsciiOfLength(4) : null, null, randomIntBetween(1, 100), System.nanoTime(), + System.currentTimeMillis(), false, AllocationStatus.NO_ATTEMPT): new UnassignedInfo(reason, randomBoolean() ? 
randomAsciiOfLength(4) : null); BytesStreamOutput out = new BytesStreamOutput(); meta.writeTo(out); out.close(); - UnassignedInfo read = new UnassignedInfo(StreamInput.wrap(out.bytes())); + UnassignedInfo read = new UnassignedInfo(out.bytes().streamInput()); assertThat(read.getReason(), equalTo(meta.getReason())); assertThat(read.getUnassignedTimeInMillis(), equalTo(meta.getUnassignedTimeInMillis())); assertThat(read.getMessage(), equalTo(meta.getMessage())); @@ -265,7 +269,8 @@ public class UnassignedInfoTests extends ESAllocationTestCase { */ public void testRemainingDelayCalculation() throws Exception { final long baseTime = System.nanoTime(); - UnassignedInfo unassignedInfo = new UnassignedInfo(UnassignedInfo.Reason.NODE_LEFT, "test", null, 0, baseTime, System.currentTimeMillis(), randomBoolean()); + UnassignedInfo unassignedInfo = new UnassignedInfo(UnassignedInfo.Reason.NODE_LEFT, "test", null, 0, baseTime, + System.currentTimeMillis(), randomBoolean(), AllocationStatus.NO_ATTEMPT); final long totalDelayNanos = TimeValue.timeValueMillis(10).nanos(); final Settings indexSettings = Settings.builder().put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), TimeValue.timeValueNanos(totalDelayNanos)).build(); long delay = unassignedInfo.getRemainingDelay(baseTime, indexSettings); @@ -340,4 +345,14 @@ public class UnassignedInfoTests extends ESAllocationTestCase { assertThat(UnassignedInfo.findNextDelayedAllocation(baseTime + delta, clusterState), equalTo(expectMinDelaySettingsNanos - delta)); } + + public void testAllocationStatusSerialization() throws IOException { + for (AllocationStatus allocationStatus : AllocationStatus.values()) { + BytesStreamOutput out = new BytesStreamOutput(); + allocationStatus.writeTo(out); + ByteBufferStreamInput in = new ByteBufferStreamInput(ByteBuffer.wrap(out.bytes().toBytesRef().bytes)); + AllocationStatus readStatus = AllocationStatus.readFrom(in); + assertThat(readStatus, equalTo(allocationStatus)); + } + } } diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java index 28f27b8988c..f95fb687c76 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java @@ -430,7 +430,7 @@ public class AllocationCommandsTests extends ESAllocationTestCase { ); BytesStreamOutput bytes = new BytesStreamOutput(); AllocationCommands.writeTo(commands, bytes); - StreamInput in = StreamInput.wrap(bytes.bytes()); + StreamInput in = bytes.bytes().streamInput(); // Since the commands are named writeable we need to register them and wrap the input stream NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java index 95ce9d668ea..458432ff78e 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingTable; import 
org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; @@ -642,7 +643,7 @@ public class ClusterRebalanceRoutingTests extends ESAllocationTestCase { while (iterator.hasNext()) { ShardRouting next = iterator.next(); if ("test1".equals(next.index().getName())) { - iterator.removeAndIgnore(); + iterator.removeAndIgnore(UnassignedInfo.AllocationStatus.NO_ATTEMPT); } } diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/DecisionsImpactOnClusterHealthTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/DecisionsImpactOnClusterHealthTests.java new file mode 100644 index 00000000000..e9f487b1e10 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/DecisionsImpactOnClusterHealthTests.java @@ -0,0 +1,169 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.cluster.routing.allocation; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.EmptyClusterInfoService; +import org.elasticsearch.cluster.health.ClusterHealthStatus; +import org.elasticsearch.cluster.health.ClusterStateHealth; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.RoutingNode; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator; +import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; +import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; +import org.elasticsearch.cluster.routing.allocation.decider.Decision; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.test.ESAllocationTestCase; +import org.elasticsearch.test.gateway.NoopGatewayAllocator; + +import java.io.IOException; +import java.util.Collections; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +/** + * This class of tests exercise various scenarios of + * primary shard allocation and assert the cluster health + * has the correct status based on those allocation decisions. 
+ */ +public class DecisionsImpactOnClusterHealthTests extends ESAllocationTestCase { + + public void testPrimaryShardNoDecisionOnIndexCreation() throws IOException { + final String indexName = "test-idx"; + Settings settings = Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString()) + .build(); + AllocationDecider decider = new TestAllocateDecision(Decision.NO); + // if deciders say NO to allocating a primary shard, then the cluster health should be RED + runAllocationTest( + settings, indexName, Collections.singleton(decider), ClusterHealthStatus.RED + ); + } + + public void testPrimaryShardThrottleDecisionOnIndexCreation() throws IOException { + final String indexName = "test-idx"; + Settings settings = Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString()) + .build(); + AllocationDecider decider = new TestAllocateDecision(Decision.THROTTLE) { + // the only allocation decider that implements this is ShardsLimitAllocationDecider and it always + // returns only YES or NO, never THROTTLE + @Override + public Decision canAllocate(RoutingNode node, RoutingAllocation allocation) { + return randomBoolean() ? Decision.YES : Decision.NO; + } + }; + // if deciders THROTTLE allocating a primary shard, stay in YELLOW state + runAllocationTest( + settings, indexName, Collections.singleton(decider), ClusterHealthStatus.YELLOW + ); + } + + public void testPrimaryShardYesDecisionOnIndexCreation() throws IOException { + final String indexName = "test-idx"; + Settings settings = Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString()) + .build(); + AllocationDecider decider = new TestAllocateDecision(Decision.YES) { + @Override + public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { + if (node.getByShardId(shardRouting.shardId()) == null) { + return Decision.YES; + } else { + return Decision.NO; + } + } + }; + // if deciders say YES to allocating primary shards, stay in YELLOW state + ClusterState clusterState = runAllocationTest( + settings, indexName, Collections.singleton(decider), ClusterHealthStatus.YELLOW + ); + // make sure primaries are initialized + RoutingTable routingTable = clusterState.routingTable(); + for (IndexShardRoutingTable indexShardRoutingTable : routingTable.index(indexName)) { + assertTrue(indexShardRoutingTable.primaryShard().initializing()); + } + }
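TestAllocateDecision used above comes from ESAllocationTestCase and is not shown in this diff; its role is to answer every decider hook with one fixed Decision. A minimal sketch of such a decider, assuming only the two canAllocate overrides that appear in these tests and a Settings-taking AllocationDecider constructor:

    // Illustrative, not part of the patch: a decider that returns one fixed decision.
    class FixedDecisionAllocationDecider extends AllocationDecider {
        private final Decision decision;

        FixedDecisionAllocationDecider(Settings settings, Decision decision) {
            super(settings);
            this.decision = decision;
        }

        @Override
        public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) {
            return decision; // same fixed answer for shard-on-node questions
        }

        @Override
        public Decision canAllocate(RoutingNode node, RoutingAllocation allocation) {
            return decision; // and for node-level questions
        }
    }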
+ + private ClusterState runAllocationTest(final Settings settings, + final String indexName, + final Set<AllocationDecider> allocationDeciders, + final ClusterHealthStatus expectedStatus) throws IOException { + + final String clusterName = "test-cluster"; + final AllocationService allocationService = newAllocationService(settings, allocationDeciders); + + logger.info("Building initial routing table"); + final int numShards = randomIntBetween(1, 5); + MetaData metaData = MetaData.builder() + .put(IndexMetaData.builder(indexName) + .settings(settings(Version.CURRENT)) + .numberOfShards(numShards) + .numberOfReplicas(1)) + .build(); + + RoutingTable routingTable = RoutingTable.builder() + .addAsNew(metaData.index(indexName)) + .build(); + + ClusterState clusterState = ClusterState.builder(new ClusterName(clusterName)) + .metaData(metaData) + .routingTable(routingTable) + .build(); + + logger.info("--> adding nodes"); + // we need at least as many nodes as shards for the THROTTLE case, because + // once a shard has been throttled on a node, that node no longer accepts + // any allocations on it + final DiscoveryNodes.Builder discoveryNodes = DiscoveryNodes.builder(); + for (int i = 0; i < numShards; i++) { + discoveryNodes.put(newNode("node" + i)); + } + clusterState = ClusterState.builder(clusterState).nodes(discoveryNodes).build(); + + logger.info("--> do the reroute"); + routingTable = allocationService.reroute(clusterState, "reroute").routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + + logger.info("--> assert cluster health"); + ClusterStateHealth health = new ClusterStateHealth(clusterState); + assertThat(health.getStatus(), equalTo(expectedStatus)); + + return clusterState; + } + + private static AllocationService newAllocationService(Settings settings, Set<AllocationDecider> deciders) { + return new AllocationService(settings, + new AllocationDeciders(settings, deciders), + NoopGatewayAllocator.INSTANCE, + new BalancedShardsAllocator(settings), + EmptyClusterInfoService.INSTANCE); + } + +}
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java index 6dd4df59060..9859cd4d570 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java @@ -25,8 +25,6 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.EmptyClusterInfoService; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.common.UUIDs; -import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.IndexRoutingTable; @@ -44,12 +42,14 @@ import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.NodeVersionAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.ReplicaAfterPrimaryActiveAllocationDecider; +import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.snapshots.Snapshot; +import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.test.ESAllocationTestCase; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.gateway.NoopGatewayAllocator; @@ -307,11 +307,11 @@ public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase { public void testRebalanceDoesNotAllocatePrimaryAndReplicasOnDifferentVersionNodes() { ShardId shard1 = new ShardId("test1", "_na_", 0); ShardId shard2 = new ShardId("test2", "_na_", 0); - final DiscoveryNode newNode = new DiscoveryNode("newNode", DummyTransportAddress.INSTANCE, emptyMap(), + final DiscoveryNode newNode = new DiscoveryNode("newNode", LocalTransportAddress.buildUnique(), emptyMap(), MASTER_DATA_ROLES, Version.CURRENT); - final DiscoveryNode oldNode1 =
new DiscoveryNode("oldNode1", DummyTransportAddress.INSTANCE, emptyMap(), + final DiscoveryNode oldNode1 = new DiscoveryNode("oldNode1", LocalTransportAddress.buildUnique(), emptyMap(), MASTER_DATA_ROLES, VersionUtils.getPreviousVersion()); - final DiscoveryNode oldNode2 = new DiscoveryNode("oldNode2", DummyTransportAddress.INSTANCE, emptyMap(), + final DiscoveryNode oldNode2 = new DiscoveryNode("oldNode2", LocalTransportAddress.buildUnique(), emptyMap(), MASTER_DATA_ROLES, VersionUtils.getPreviousVersion()); MetaData metaData = MetaData.builder() .put(IndexMetaData.builder(shard1.getIndexName()).settings(settings(Version.CURRENT).put(Settings.EMPTY)).numberOfShards(1).numberOfReplicas(1)) @@ -347,11 +347,11 @@ public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase { } public void testRestoreDoesNotAllocateSnapshotOnOlderNodes() { - final DiscoveryNode newNode = new DiscoveryNode("newNode", DummyTransportAddress.INSTANCE, emptyMap(), + final DiscoveryNode newNode = new DiscoveryNode("newNode", LocalTransportAddress.buildUnique(), emptyMap(), MASTER_DATA_ROLES, Version.CURRENT); - final DiscoveryNode oldNode1 = new DiscoveryNode("oldNode1", DummyTransportAddress.INSTANCE, emptyMap(), + final DiscoveryNode oldNode1 = new DiscoveryNode("oldNode1", LocalTransportAddress.buildUnique(), emptyMap(), MASTER_DATA_ROLES, VersionUtils.getPreviousVersion()); - final DiscoveryNode oldNode2 = new DiscoveryNode("oldNode2", DummyTransportAddress.INSTANCE, emptyMap(), + final DiscoveryNode oldNode2 = new DiscoveryNode("oldNode2", LocalTransportAddress.buildUnique(), emptyMap(), MASTER_DATA_ROLES, VersionUtils.getPreviousVersion()); int numberOfShards = randomIntBetween(1, 3); @@ -407,7 +407,7 @@ public class NodeVersionAllocationDeciderTests extends ESAllocationTestCase { return clusterState; } - private final void assertRecoveryNodeVersions(RoutingNodes routingNodes) { + private void assertRecoveryNodeVersions(RoutingNodes routingNodes) { logger.trace("RoutingNodes: {}", routingNodes.prettyPrint()); List mutableShardRoutings = routingNodes.shardsWithState(ShardRoutingState.RELOCATING); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SameShardRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SameShardRoutingTests.java index 3d0475ed137..e09d9790651 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SameShardRoutingTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/SameShardRoutingTests.java @@ -33,7 +33,7 @@ import org.elasticsearch.cluster.routing.allocation.decider.SameShardAllocationD import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.test.ESAllocationTestCase; import static java.util.Collections.emptyMap; @@ -63,9 +63,9 @@ public class SameShardRoutingTests extends ESAllocationTestCase { logger.info("--> adding two nodes with the same host"); clusterState = ClusterState.builder(clusterState).nodes( DiscoveryNodes.builder() - .put(new DiscoveryNode("node1", "node1", "test1", "test1", DummyTransportAddress.INSTANCE, emptyMap(), + .put(new DiscoveryNode("node1", "node1", "node1", "test1", "test1", LocalTransportAddress.buildUnique(), emptyMap(), MASTER_DATA_ROLES, Version.CURRENT)) - .put(new DiscoveryNode("node2", 
"node2", "test1", "test1", DummyTransportAddress.INSTANCE, emptyMap(), + .put(new DiscoveryNode("node2", "node2", "node2", "test1", "test1", LocalTransportAddress.buildUnique(), emptyMap(), MASTER_DATA_ROLES, Version.CURRENT))).build(); routingTable = strategy.reroute(clusterState, "reroute").routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); @@ -82,7 +82,7 @@ public class SameShardRoutingTests extends ESAllocationTestCase { logger.info("--> add another node, with a different host, replicas will be allocating"); clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()) - .put(new DiscoveryNode("node3", "node3", "test2", "test2", DummyTransportAddress.INSTANCE, emptyMap(), + .put(new DiscoveryNode("node3", "node3", "node3", "test2", "test2", LocalTransportAddress.buildUnique(), emptyMap(), MASTER_DATA_ROLES, Version.CURRENT))).build(); routingTable = strategy.reroute(clusterState, "reroute").routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java index be50c5f5331..56ca6381af9 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java @@ -41,7 +41,6 @@ import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.index.Index; @@ -110,9 +109,9 @@ public class DiskThresholdDeciderUnitTests extends ESAllocationTestCase { final Index index = metaData.index("test").getIndex(); ShardRouting test_0 = ShardRouting.newUnassigned(new ShardId(index, 0), null, true, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo")); - DiscoveryNode node_0 = new DiscoveryNode("node_0", DummyTransportAddress.INSTANCE, Collections.emptyMap(), + DiscoveryNode node_0 = new DiscoveryNode("node_0", LocalTransportAddress.buildUnique(), Collections.emptyMap(), new HashSet<>(Arrays.asList(DiscoveryNode.Role.values())), Version.CURRENT); - DiscoveryNode node_1 = new DiscoveryNode("node_1", DummyTransportAddress.INSTANCE, Collections.emptyMap(), + DiscoveryNode node_1 = new DiscoveryNode("node_1", LocalTransportAddress.buildUnique(), Collections.emptyMap(), new HashSet<>(Arrays.asList(DiscoveryNode.Role.values())), Version.CURRENT); RoutingTable routingTable = RoutingTable.builder() @@ -149,9 +148,9 @@ public class DiskThresholdDeciderUnitTests extends ESAllocationTestCase { DiskThresholdDecider decider = new DiskThresholdDecider(Settings.EMPTY, nss, cis, null); ImmutableOpenMap.Builder shardRoutingMap = ImmutableOpenMap.builder(); - DiscoveryNode node_0 = new DiscoveryNode("node_0", DummyTransportAddress.INSTANCE, Collections.emptyMap(), + DiscoveryNode node_0 = new DiscoveryNode("node_0", LocalTransportAddress.buildUnique(), Collections.emptyMap(), new HashSet<>(Arrays.asList(DiscoveryNode.Role.values())), 
Version.CURRENT); - DiscoveryNode node_1 = new DiscoveryNode("node_1", DummyTransportAddress.INSTANCE, Collections.emptyMap(), + DiscoveryNode node_1 = new DiscoveryNode("node_1", LocalTransportAddress.buildUnique(), Collections.emptyMap(), new HashSet<>(Arrays.asList(DiscoveryNode.Role.values())), Version.CURRENT); MetaData metaData = MetaData.builder() diff --git a/core/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java b/core/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java index 7b81d3ece27..4fa6615ac45 100644 --- a/core/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java @@ -80,7 +80,7 @@ public class ClusterSerializationTests extends ESAllocationTestCase { BytesStreamOutput outStream = new BytesStreamOutput(); source.writeTo(outStream); - StreamInput inStream = StreamInput.wrap(outStream.bytes().toBytes()); + StreamInput inStream = outStream.bytes().streamInput(); RoutingTable target = RoutingTable.Builder.readFrom(inStream); assertThat(target.prettyPrint(), equalTo(source.prettyPrint())); diff --git a/core/src/test/java/org/elasticsearch/cluster/serialization/ClusterStateToStringTests.java b/core/src/test/java/org/elasticsearch/cluster/serialization/ClusterStateToStringTests.java index 99cde60f086..9957a6d3603 100644 --- a/core/src/test/java/org/elasticsearch/cluster/serialization/ClusterStateToStringTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/serialization/ClusterStateToStringTests.java @@ -29,7 +29,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.test.ESAllocationTestCase; import static java.util.Collections.emptyMap; @@ -50,7 +50,7 @@ public class ClusterStateToStringTests extends ESAllocationTestCase { .addAsNew(metaData.index("test_idx")) .build(); - DiscoveryNodes nodes = DiscoveryNodes.builder().put(new DiscoveryNode("node_foo", DummyTransportAddress.INSTANCE, + DiscoveryNodes nodes = DiscoveryNodes.builder().put(new DiscoveryNode("node_foo", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT)).localNodeId("node_foo").masterNodeId("node_foo").build(); ClusterState clusterState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)).nodes(nodes) diff --git a/core/src/test/java/org/elasticsearch/cluster/serialization/DiffableTests.java b/core/src/test/java/org/elasticsearch/cluster/serialization/DiffableTests.java index 452c6054576..611c261e334 100644 --- a/core/src/test/java/org/elasticsearch/cluster/serialization/DiffableTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/serialization/DiffableTests.java @@ -310,7 +310,7 @@ public class DiffableTests extends ESTestCase { logger.debug("--> serializing diff"); BytesStreamOutput out = new BytesStreamOutput(); diffMap.writeTo(out); - StreamInput in = StreamInput.wrap(out.bytes()); + StreamInput in = out.bytes().streamInput(); logger.debug("--> reading diff back"); diffMap = readDiff(in); } diff --git a/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceIT.java 
b/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceIT.java index 991f11a4493..68537038eb2 100644 --- a/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceIT.java +++ b/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceIT.java @@ -57,9 +57,6 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; -/** - * - */ @ClusterScope(scope = Scope.TEST, numDataNodes = 0) @ESIntegTestCase.SuppressLocalMode public class ClusterServiceIT extends ESIntegTestCase { @@ -94,7 +91,7 @@ public class ClusterServiceIT extends ESIntegTestCase { } @Override - public void onAllNodesAcked(@Nullable Throwable t) { + public void onAllNodesAcked(@Nullable Exception e) { allNodesAcked.set(true); latch.countDown(); } @@ -127,8 +124,8 @@ public class ClusterServiceIT extends ESIntegTestCase { } @Override - public void onFailure(String source, Throwable t) { - logger.error("failed to execute callback in test {}", t, source); + public void onFailure(String source, Exception e) { + logger.error("failed to execute callback in test {}", e, source); onFailure.set(true); latch.countDown(); } @@ -165,7 +162,7 @@ public class ClusterServiceIT extends ESIntegTestCase { } @Override - public void onAllNodesAcked(@Nullable Throwable t) { + public void onAllNodesAcked(@Nullable Exception e) { allNodesAcked.set(true); latch.countDown(); } @@ -198,8 +195,8 @@ public class ClusterServiceIT extends ESIntegTestCase { } @Override - public void onFailure(String source, Throwable t) { - logger.error("failed to execute callback in test {}", t, source); + public void onFailure(String source, Exception e) { + logger.error("failed to execute callback in test {}", e, source); onFailure.set(true); latch.countDown(); } @@ -240,7 +237,7 @@ public class ClusterServiceIT extends ESIntegTestCase { } @Override - public void onAllNodesAcked(@Nullable Throwable t) { + public void onAllNodesAcked(@Nullable Exception e) { allNodesAcked.set(true); latch.countDown(); } @@ -272,8 +269,8 @@ public class ClusterServiceIT extends ESIntegTestCase { } @Override - public void onFailure(String source, Throwable t) { - logger.error("failed to execute callback in test {}", t, source); + public void onFailure(String source, Exception e) { + logger.error("failed to execute callback in test {}", e, source); onFailure.set(true); latch.countDown(); } @@ -313,7 +310,7 @@ public class ClusterServiceIT extends ESIntegTestCase { } @Override - public void onAllNodesAcked(@Nullable Throwable t) { + public void onAllNodesAcked(@Nullable Exception e) { allNodesAcked.set(true); latch.countDown(); } @@ -346,8 +343,8 @@ public class ClusterServiceIT extends ESIntegTestCase { } @Override - public void onFailure(String source, Throwable t) { - logger.error("failed to execute callback in test {}", t, source); + public void onFailure(String source, Exception e) { + logger.error("failed to execute callback in test {}", e, source); onFailure.set(true); latch.countDown(); } @@ -388,7 +385,7 @@ public class ClusterServiceIT extends ESIntegTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { invoked1.countDown(); fail(); } @@ -403,7 +400,7 @@ public class ClusterServiceIT extends ESIntegTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { fail(); } @@ -458,7 +455,7 @@ public class ClusterServiceIT 
extends ESIntegTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { invoked3.countDown(); fail(); } @@ -473,7 +470,7 @@ public class ClusterServiceIT extends ESIntegTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { fail(); } }); @@ -520,7 +517,6 @@ public class ClusterServiceIT extends ESIntegTestCase { assertThat(clusterService.state().nodes().getMasterNode(), notNullValue()); assertThat(clusterService.state().nodes().isLocalNodeElectedMaster(), is(true)); assertThat(testService.master(), is(true)); - String node_1 = internalCluster().startNode(settings); final ClusterService clusterService1 = internalCluster().getInstance(ClusterService.class, node_1); MasterAwareService testService1 = internalCluster().getInstance(MasterAwareService.class, node_1); @@ -583,7 +579,7 @@ public class ClusterServiceIT extends ESIntegTestCase { public static class TestPlugin extends Plugin { @Override - public Collection> nodeServices() { + public Collection> getGuiceServiceClasses() { List> services = new ArrayList<>(1); services.add(MasterAwareService.class); return services; @@ -591,7 +587,7 @@ public class ClusterServiceIT extends ESIntegTestCase { } @Singleton - public static class MasterAwareService extends AbstractLifecycleComponent implements LocalNodeMasterListener { + public static class MasterAwareService extends AbstractLifecycleComponent implements LocalNodeMasterListener { private final ClusterService clusterService; private volatile boolean master; diff --git a/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java index 66f96f8cd3a..1002774d2ca 100644 --- a/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/service/ClusterServiceTests.java @@ -37,7 +37,7 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockLogAppender; @@ -69,10 +69,14 @@ import java.util.stream.Collectors; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; import static org.elasticsearch.test.ClusterServiceUtils.setState; +import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.hasToString; import static org.hamcrest.Matchers.is; public class ClusterServiceTests extends ESTestCase { @@ -109,7 +113,7 @@ public class ClusterServiceTests extends ESTestCase { TimedClusterService timedClusterService = new TimedClusterService(Settings.builder().put("cluster.name", "ClusterServiceTests").build(), new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), threadPool); - timedClusterService.setLocalNode(new DiscoveryNode("node1", 
DummyTransportAddress.INSTANCE, emptyMap(), + timedClusterService.setLocalNode(new DiscoveryNode("node1", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT)); timedClusterService.setNodeConnectionsService(new NodeConnectionsService(Settings.EMPTY, null, null) { @Override @@ -149,8 +153,8 @@ public class ClusterServiceTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { - throw new RuntimeException(t); + public void onFailure(String source, Exception e) { + throw new RuntimeException(e); } }); @@ -163,7 +167,7 @@ public class ClusterServiceTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { timedOut.countDown(); } @@ -183,8 +187,8 @@ public class ClusterServiceTests extends ESTestCase { final CountDownLatch allProcessed = new CountDownLatch(1); clusterService.submitStateUpdateTask("test3", new ClusterStateUpdateTask() { @Override - public void onFailure(String source, Throwable t) { - throw new RuntimeException(t); + public void onFailure(String source, Exception e) { + throw new RuntimeException(e); } @Override @@ -212,7 +216,7 @@ public class ClusterServiceTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { taskFailed[0] = true; latch1.countDown(); } @@ -237,7 +241,7 @@ public class ClusterServiceTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { taskFailed[0] = true; latch2.countDown(); } @@ -286,7 +290,7 @@ public class ClusterServiceTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { } } ); @@ -326,9 +330,9 @@ public class ClusterServiceTests extends ESTestCase { ClusterStateTaskListener listener = new ClusterStateTaskListener() { @Override - public void onFailure(String source, Throwable t) { - logger.error("unexpected failure: [{}]", t, source); - failures.add(new Tuple<>(source, t)); + public void onFailure(String source, Exception e) { + logger.error("unexpected failure: [{}]", e, source); + failures.add(new Tuple<>(source, e)); updateLatch.countDown(); } @@ -387,8 +391,8 @@ public class ClusterServiceTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { - fail(ExceptionsHelper.detailedMessage(t)); + public void onFailure(String source, Exception e) { + fail(ExceptionsHelper.detailedMessage(e)); } })) ; } @@ -523,8 +527,8 @@ public class ClusterServiceTests extends ESTestCase { final CountDownLatch updateLatch = new CountDownLatch(totalTaskCount); final ClusterStateTaskListener listener = new ClusterStateTaskListener() { @Override - public void onFailure(String source, Throwable t) { - fail(ExceptionsHelper.detailedMessage(t)); + public void onFailure(String source, Exception e) { + fail(ExceptionsHelper.detailedMessage(e)); } @Override @@ -608,13 +612,12 @@ public class ClusterServiceTests extends ESTestCase { BlockingTask block = new BlockingTask(Priority.IMMEDIATE); clusterService.submitStateUpdateTask("test", block); int taskCount = randomIntBetween(5, 20); - Priority[] priorities = Priority.values(); // will hold all the tasks in the order in which they were executed List tasks = new ArrayList<>(taskCount); CountDownLatch latch = new CountDownLatch(taskCount); for (int i = 0; i < taskCount; i++) { - 
Priority priority = priorities[randomIntBetween(0, priorities.length - 1)]; + Priority priority = randomFrom(Priority.values()); clusterService.submitStateUpdateTask("test", new PrioritizedTask(priority, latch, tasks)); } @@ -647,15 +650,22 @@ public class ClusterServiceTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { - fail(ExceptionsHelper.detailedMessage(t)); + public void onFailure(String source, Exception e) { + fail(ExceptionsHelper.detailedMessage(e)); } }; clusterService.submitStateUpdateTask("first time", task, ClusterStateTaskConfig.build(Priority.NORMAL), executor, listener); - expectThrows(IllegalArgumentException.class, () -> clusterService.submitStateUpdateTask("second time", task, - ClusterStateTaskConfig.build(Priority.NORMAL), executor, listener)); + final IllegalStateException e = + expectThrows( + IllegalStateException.class, + () -> clusterService.submitStateUpdateTask( + "second time", + task, + ClusterStateTaskConfig.build(Priority.NORMAL), + executor, listener)); + assertThat(e, hasToString(containsString("task [1] with source [second time] is already queued"))); clusterService.submitStateUpdateTask("third time a charm", new SimpleTask(1), ClusterStateTaskConfig.build(Priority.NORMAL), executor, listener); @@ -693,7 +703,7 @@ public class ClusterServiceTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { fail(); } }); @@ -710,7 +720,7 @@ public class ClusterServiceTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { latch.countDown(); } }); @@ -727,7 +737,7 @@ public class ClusterServiceTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { fail(); } }); @@ -745,7 +755,7 @@ public class ClusterServiceTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { fail(); } }); @@ -788,7 +798,7 @@ public class ClusterServiceTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { fail(); } }); @@ -807,7 +817,7 @@ public class ClusterServiceTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { latch.countDown(); } }); @@ -824,7 +834,7 @@ public class ClusterServiceTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { fail(); } }); @@ -841,7 +851,7 @@ public class ClusterServiceTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { fail(); } }); @@ -859,7 +869,7 @@ public class ClusterServiceTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { fail(); } }); @@ -886,6 +896,11 @@ public class ClusterServiceTests extends ESTestCase { public boolean equals(Object obj) { return super.equals(obj); } + + @Override + public String toString() { + return Integer.toString(id); + } } private static class BlockingTask extends ClusterStateUpdateTask implements Releasable { @@ -902,7 +917,7 @@ public class ClusterServiceTests 
extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { } public void close() { @@ -930,7 +945,7 @@ public class ClusterServiceTests extends ESTestCase { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { latch.countDown(); } } diff --git a/core/src/test/java/org/elasticsearch/common/BooleansTests.java b/core/src/test/java/org/elasticsearch/common/BooleansTests.java index 6e5446cebf9..176c4c75dc7 100644 --- a/core/src/test/java/org/elasticsearch/common/BooleansTests.java +++ b/core/src/test/java/org/elasticsearch/common/BooleansTests.java @@ -51,9 +51,9 @@ public class BooleansTests extends ESTestCase { assertThat(Booleans.parseBoolean(null, false), is(false)); assertThat(Booleans.parseBoolean(null, true), is(true)); - assertThat(Booleans.parseBoolean(randomFrom("true", "on", "yes", "1"), randomFrom(null, Boolean.TRUE, Boolean.FALSE)), is(true)); - assertThat(Booleans.parseBoolean(randomFrom("false", "off", "no", "0"), randomFrom(null, Boolean.TRUE, Boolean.FALSE)), is(false)); - assertThat(Booleans.parseBoolean(randomFrom("true", "on", "yes").toUpperCase(Locale.ROOT),randomFrom(null, Boolean.TRUE, Boolean.FALSE)), is(true)); + assertThat(Booleans.parseBoolean(randomFrom("true", "on", "yes", "1"), randomFrom(Boolean.TRUE, Boolean.FALSE, null)), is(true)); + assertThat(Booleans.parseBoolean(randomFrom("false", "off", "no", "0"), randomFrom(Boolean.TRUE, Boolean.FALSE, null)), is(false)); + assertThat(Booleans.parseBoolean(randomFrom("true", "on", "yes").toUpperCase(Locale.ROOT),randomFrom(Boolean.TRUE, Boolean.FALSE, null)), is(true)); assertThat(Booleans.parseBoolean(null, Boolean.FALSE), is(false)); assertThat(Booleans.parseBoolean(null, Boolean.TRUE), is(true)); assertThat(Booleans.parseBoolean(null, null), nullValue()); @@ -70,7 +70,7 @@ public class BooleansTests extends ESTestCase { assertThat(Booleans.parseBooleanExact(randomFrom("true", "on", "yes", "1")), is(true)); assertThat(Booleans.parseBooleanExact(randomFrom("false", "off", "no", "0")), is(false)); try { - Booleans.parseBooleanExact(randomFrom(null, "fred", "foo", "barney")); + Booleans.parseBooleanExact(randomFrom("fred", "foo", "barney", null)); fail("Expected exception while parsing invalid boolean value "); } catch (Exception ex) { assertTrue(ex instanceof IllegalArgumentException); diff --git a/core/src/test/java/org/elasticsearch/common/ChannelsTests.java b/core/src/test/java/org/elasticsearch/common/ChannelsTests.java index 4f2bad36d4a..c0cb3482b0e 100644 --- a/core/src/test/java/org/elasticsearch/common/ChannelsTests.java +++ b/core/src/test/java/org/elasticsearch/common/ChannelsTests.java @@ -19,14 +19,11 @@ package org.elasticsearch.common; -import org.elasticsearch.common.bytes.ByteBufferBytesReference; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Channels; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; -import org.jboss.netty.buffer.ByteBufferBackedChannelBuffer; -import org.jboss.netty.buffer.ChannelBuffer; import org.junit.After; import org.junit.Before; @@ -85,7 +82,7 @@ public class ChannelsTests extends ESTestCase { BytesReference source = new BytesArray(randomBytes, offset + offsetToRead, lengthToRead); BytesReference read = new BytesArray(readBytes, offset + offsetToRead, lengthToRead); - assertThat("read bytes didn't match written bytes", 
source.toBytes(), Matchers.equalTo(read.toBytes())); + assertThat("read bytes didn't match written bytes", BytesReference.toBytes(source), Matchers.equalTo(BytesReference.toBytes(read))); } public void testBufferReadPastEOFWithException() throws Exception { @@ -157,7 +154,9 @@ public class ChannelsTests extends ESTestCase { copy.flip(); BytesReference sourceRef = new BytesArray(randomBytes, offset + offsetToRead, lengthToRead); - BytesReference copyRef = new ByteBufferBytesReference(copy); + byte[] tmp = new byte[copy.remaining()]; + copy.duplicate().get(tmp); + BytesReference copyRef = new BytesArray(tmp); assertTrue("read bytes didn't match written bytes", sourceRef.equals(copyRef)); } diff --git a/core/src/test/java/org/elasticsearch/common/PriorityTests.java b/core/src/test/java/org/elasticsearch/common/PriorityTests.java new file mode 100644 index 00000000000..06bbab6bf58 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/PriorityTests.java @@ -0,0 +1,97 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.common; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +public class PriorityTests extends ESTestCase { + + public void testValueOf() { + for (Priority p : Priority.values()) { + assertSame(p, Priority.valueOf(p.toString())); + } + + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> { + Priority.valueOf("foobar"); + }); + assertEquals("No enum constant org.elasticsearch.common.Priority.foobar", exception.getMessage()); + } + + public void testToString() { + assertEquals("IMMEDIATE", Priority.IMMEDIATE.toString()); + assertEquals("HIGH", Priority.HIGH.toString()); + assertEquals("LANGUID", Priority.LANGUID.toString()); + assertEquals("LOW", Priority.LOW.toString()); + assertEquals("URGENT", Priority.URGENT.toString()); + assertEquals("NORMAL", Priority.NORMAL.toString()); + assertEquals(6, Priority.values().length); + } + + public void testSerialization() throws IOException { + for (Priority p : Priority.values()) { + BytesStreamOutput out = new BytesStreamOutput(); + Priority.writeTo(p, out); + Priority priority = Priority.readFrom(out.bytes().streamInput()); + assertSame(p, priority); + } + assertSame(Priority.IMMEDIATE, Priority.fromByte((byte) 0)); + assertSame(Priority.HIGH, Priority.fromByte((byte) 2)); + assertSame(Priority.LANGUID, Priority.fromByte((byte) 5)); + assertSame(Priority.LOW, Priority.fromByte((byte) 4)); + assertSame(Priority.NORMAL, Priority.fromByte((byte) 3)); + assertSame(Priority.URGENT, Priority.fromByte((byte) 1)); + assertEquals(6, Priority.values().length); + } + + public void testCompareTo() { + assertTrue(Priority.IMMEDIATE.compareTo(Priority.URGENT) < 0); + assertTrue(Priority.URGENT.compareTo(Priority.HIGH) < 0); + assertTrue(Priority.HIGH.compareTo(Priority.NORMAL) < 0); + assertTrue(Priority.NORMAL.compareTo(Priority.LOW) < 0); + assertTrue(Priority.LOW.compareTo(Priority.LANGUID) < 0); + + assertTrue(Priority.URGENT.compareTo(Priority.IMMEDIATE) > 0); + assertTrue(Priority.HIGH.compareTo(Priority.URGENT) > 0); + assertTrue(Priority.NORMAL.compareTo(Priority.HIGH) > 0); + assertTrue(Priority.LOW.compareTo(Priority.NORMAL) > 0); + assertTrue(Priority.LANGUID.compareTo(Priority.LOW) > 0); + + for (Priority p : Priority.values()) { + assertEquals(0, p.compareTo(p)); + } + List shuffledAndSorted = Arrays.asList(Priority.values()); + Collections.shuffle(shuffledAndSorted, random()); + Collections.sort(shuffledAndSorted); + for (List priorities : Arrays.asList(shuffledAndSorted, + Arrays.asList(Priority.values()))) { // #values() guarantees order! 
+ assertSame(Priority.IMMEDIATE, priorities.get(0)); + assertSame(Priority.URGENT, priorities.get(1)); + assertSame(Priority.HIGH, priorities.get(2)); + assertSame(Priority.NORMAL, priorities.get(3)); + assertSame(Priority.LOW, priorities.get(4)); + assertSame(Priority.LANGUID, priorities.get(5)); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/common/breaker/MemoryCircuitBreakerTests.java b/core/src/test/java/org/elasticsearch/common/breaker/MemoryCircuitBreakerTests.java index bb9d23db1cb..f10a0da3029 100644 --- a/core/src/test/java/org/elasticsearch/common/breaker/MemoryCircuitBreakerTests.java +++ b/core/src/test/java/org/elasticsearch/common/breaker/MemoryCircuitBreakerTests.java @@ -43,7 +43,7 @@ public class MemoryCircuitBreakerTests extends ESTestCase { final int BYTES_PER_THREAD = scaledRandomIntBetween(500, 4500); final Thread[] threads = new Thread[NUM_THREADS]; final AtomicBoolean tripped = new AtomicBoolean(false); - final AtomicReference lastException = new AtomicReference<>(null); + final AtomicReference lastException = new AtomicReference<>(null); final MemoryCircuitBreaker breaker = new MemoryCircuitBreaker(new ByteSizeValue((BYTES_PER_THREAD * NUM_THREADS) - 1), 1.0, logger); @@ -60,8 +60,8 @@ public class MemoryCircuitBreakerTests extends ESTestCase { } else { assertThat(tripped.compareAndSet(false, true), equalTo(true)); } - } catch (Throwable e2) { - lastException.set(e2); + } catch (Exception e) { + lastException.set(e); } } } @@ -117,8 +117,8 @@ public class MemoryCircuitBreakerTests extends ESTestCase { } else { assertThat(tripped.compareAndSet(false, true), equalTo(true)); } - } catch (Throwable e2) { - lastException.set(e2); + } catch (Exception e) { + lastException.set(e); } } } @@ -178,8 +178,8 @@ public class MemoryCircuitBreakerTests extends ESTestCase { breaker.addEstimateBytesAndMaybeBreak(1L, "test"); } catch (CircuitBreakingException e) { tripped.incrementAndGet(); - } catch (Throwable e2) { - lastException.set(e2); + } catch (Exception e) { + lastException.set(e); } } } diff --git a/core/src/test/java/org/elasticsearch/common/bytes/BytesArrayTests.java b/core/src/test/java/org/elasticsearch/common/bytes/BytesArrayTests.java index 61d24ef44c3..fff030200b7 100644 --- a/core/src/test/java/org/elasticsearch/common/bytes/BytesArrayTests.java +++ b/core/src/test/java/org/elasticsearch/common/bytes/BytesArrayTests.java @@ -32,10 +32,28 @@ public class BytesArrayTests extends AbstractBytesReferenceTestCase { out.writeByte((byte) random().nextInt(1 << 8)); } assertEquals(length, out.size()); - BytesArray ref = out.bytes().toBytesArray(); + BytesArray ref = new BytesArray(out.bytes().toBytesRef()); assertEquals(length, ref.length()); assertTrue(ref instanceof BytesArray); assertThat(ref.length(), Matchers.equalTo(length)); return ref; } + + public void testArray() throws IOException { + int[] sizes = {0, randomInt(PAGE_SIZE), PAGE_SIZE, randomIntBetween(2, PAGE_SIZE * randomIntBetween(2, 5))}; + + for (int i = 0; i < sizes.length; i++) { + BytesArray pbr = (BytesArray) newBytesReference(sizes[i]); + byte[] array = pbr.array(); + assertNotNull(array); + assertEquals(sizes[i], array.length); + assertSame(array, pbr.array()); + } + } + + public void testArrayOffset() throws IOException { + int length = randomInt(PAGE_SIZE * randomIntBetween(2, 5)); + BytesArray pbr = (BytesArray) newBytesReference(length); + assertEquals(0, pbr.offset()); + } } diff --git a/core/src/test/java/org/elasticsearch/common/bytes/BytesReferenceTests.java 
b/core/src/test/java/org/elasticsearch/common/bytes/BytesReferenceTests.java deleted file mode 100644 index 60f4983dd19..00000000000 --- a/core/src/test/java/org/elasticsearch/common/bytes/BytesReferenceTests.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.bytes; - - -import org.elasticsearch.test.ESTestCase; - -import java.util.Arrays; - -public class BytesReferenceTests extends ESTestCase { - - public void testEquals() { - final int len = randomIntBetween(0, randomBoolean() ? 10: 100000); - final int offset1 = randomInt(5); - final byte[] array1 = new byte[offset1 + len + randomInt(5)]; - random().nextBytes(array1); - final int offset2 = randomInt(offset1); - final byte[] array2 = Arrays.copyOfRange(array1, offset1 - offset2, array1.length); - - final BytesArray b1 = new BytesArray(array1, offset1, len); - final BytesArray b2 = new BytesArray(array2, offset2, len); - assertTrue(BytesReference.Helper.bytesEqual(b1, b2)); - assertTrue(BytesReference.Helper.bytesEquals(b1, b2)); - assertEquals(Arrays.hashCode(b1.toBytes()), b1.hashCode()); - assertEquals(BytesReference.Helper.bytesHashCode(b1), BytesReference.Helper.slowHashCode(b2)); - - // test same instance - assertTrue(BytesReference.Helper.bytesEqual(b1, b1)); - assertTrue(BytesReference.Helper.bytesEquals(b1, b1)); - assertEquals(BytesReference.Helper.bytesHashCode(b1), BytesReference.Helper.slowHashCode(b1)); - - if (len > 0) { - // test different length - BytesArray differentLen = new BytesArray(array1, offset1, randomInt(len - 1)); - assertFalse(BytesReference.Helper.bytesEqual(b1, differentLen)); - - // test changed bytes - array1[offset1 + randomInt(len - 1)] += 13; - assertFalse(BytesReference.Helper.bytesEqual(b1, b2)); - assertFalse(BytesReference.Helper.bytesEquals(b1, b2)); - } - } - -} diff --git a/core/src/test/java/org/elasticsearch/common/bytes/CompositeBytesReferenceTests.java b/core/src/test/java/org/elasticsearch/common/bytes/CompositeBytesReferenceTests.java new file mode 100644 index 00000000000..aec957aba68 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/bytes/CompositeBytesReferenceTests.java @@ -0,0 +1,110 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.common.bytes; + +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; +import org.apache.lucene.util.BytesRefIterator; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.ReleasableBytesStreamOutput; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +public class CompositeBytesReferenceTests extends AbstractBytesReferenceTestCase { + @Override + protected BytesReference newBytesReference(int length) throws IOException { + // we know bytes stream output always creates a paged bytes reference, we use it to create randomized content + List referenceList = newRefList(length); + BytesReference ref = new CompositeBytesReference(referenceList.toArray(new BytesReference[0])); + assertEquals(length, ref.length()); + return ref; + } + + private List newRefList(int length) throws IOException { + List referenceList = new ArrayList<>(); + for (int i = 0; i < length;) { + int remaining = length-i; + int sliceLength = randomIntBetween(1, remaining); + ReleasableBytesStreamOutput out = new ReleasableBytesStreamOutput(sliceLength, bigarrays); + for (int j = 0; j < sliceLength; j++) { + out.writeByte((byte) random().nextInt(1 << 8)); + } + assertEquals(sliceLength, out.size()); + referenceList.add(out.bytes()); + i+=sliceLength; + } + return referenceList; + } + + public void testCompositeBuffer() throws IOException { + List referenceList = newRefList(randomIntBetween(1, PAGE_SIZE * 2)); + BytesReference ref = new CompositeBytesReference(referenceList.toArray(new BytesReference[0])); + BytesRefIterator iterator = ref.iterator(); + BytesRefBuilder builder = new BytesRefBuilder(); + + for (BytesReference reference : referenceList) { + BytesRefIterator innerIter = reference.iterator(); // sometimes we have a paged ref - pull an iter and walk all pages! 
+ BytesRef scratch; + while ((scratch = innerIter.next()) != null) { + BytesRef next = iterator.next(); + assertNotNull(next); + assertEquals(next, scratch); + builder.append(next); + } + + } + assertNull(iterator.next()); + + int offset = 0; + for (BytesReference reference : referenceList) { + assertEquals(reference, ref.slice(offset, reference.length())); + int probes = randomIntBetween(Math.min(10, reference.length()), reference.length()); + for (int i = 0; i < probes; i++) { + int index = randomIntBetween(0, reference.length()-1); + assertEquals(ref.get(offset + index), reference.get(index)); + } + offset += reference.length(); + } + + BytesArray array = new BytesArray(builder.toBytesRef()); + assertEquals(array, ref); + assertEquals(array.hashCode(), ref.hashCode()); + + BytesStreamOutput output = new BytesStreamOutput(); + ref.writeTo(output); + assertEquals(array, output.bytes()); + } + + @Override + public void testToBytesRefSharedPage() throws IOException { + // CompositeBytesReference doesn't share pages + } + + @Override + public void testSliceArrayOffset() throws IOException { + // the assertions in this test only work on no-composite buffers + } + + @Override + public void testSliceToBytesRef() throws IOException { + // CompositeBytesReference shifts offsets + } +} diff --git a/core/src/test/java/org/elasticsearch/common/bytes/PagedBytesReferenceTests.java b/core/src/test/java/org/elasticsearch/common/bytes/PagedBytesReferenceTests.java index 5a299d82de8..6ae2b3cf943 100644 --- a/core/src/test/java/org/elasticsearch/common/bytes/PagedBytesReferenceTests.java +++ b/core/src/test/java/org/elasticsearch/common/bytes/PagedBytesReferenceTests.java @@ -50,15 +50,15 @@ public class PagedBytesReferenceTests extends AbstractBytesReferenceTestCase { return ref; } - public void testToBytesArrayMaterializedPages() throws IOException { + public void testToBytesRefMaterializedPages() throws IOException { // we need a length != (n * pagesize) to avoid page sharing at boundaries int length = 0; while ((length % PAGE_SIZE) == 0) { length = randomIntBetween(PAGE_SIZE, PAGE_SIZE * randomIntBetween(2, 5)); } BytesReference pbr = newBytesReference(length); - BytesArray ba = pbr.toBytesArray(); - BytesArray ba2 = pbr.toBytesArray(); + BytesArray ba = new BytesArray(pbr.toBytesRef()); + BytesArray ba2 = new BytesArray(pbr.toBytesRef()); assertNotNull(ba); assertNotNull(ba2); assertEquals(pbr.length(), ba.length()); @@ -67,23 +67,23 @@ public class PagedBytesReferenceTests extends AbstractBytesReferenceTestCase { assertNotSame(ba.array(), ba2.array()); } - public void testArray() throws IOException { + public void testSinglePage() throws IOException { int[] sizes = {0, randomInt(PAGE_SIZE), PAGE_SIZE, randomIntBetween(2, PAGE_SIZE * randomIntBetween(2, 5))}; for (int i = 0; i < sizes.length; i++) { BytesReference pbr = newBytesReference(sizes[i]); // verify that array() is cheap for small payloads if (sizes[i] <= PAGE_SIZE) { - byte[] array = pbr.array(); + BytesRef page = getSinglePageOrNull(pbr); + assertNotNull(page); + byte[] array = page.bytes; assertNotNull(array); assertEquals(sizes[i], array.length); - assertSame(array, pbr.array()); + assertSame(array, page.bytes); } else { - try { - pbr.array(); - fail("expected IllegalStateException"); - } catch (IllegalStateException isx) { - // expected + BytesRef page = getSinglePageOrNull(pbr); + if (pbr.length() > 0) { + assertNull(page); } } } @@ -94,22 +94,42 @@ public class PagedBytesReferenceTests extends AbstractBytesReferenceTestCase { for (int 
i = 0; i < sizes.length; i++) { BytesReference pbr = newBytesReference(sizes[i]); - byte[] bytes = pbr.toBytes(); + byte[] bytes = BytesReference.toBytes(pbr); assertEquals(sizes[i], bytes.length); // verify that toBytes() is cheap for small payloads if (sizes[i] <= PAGE_SIZE) { - assertSame(bytes, pbr.toBytes()); + assertSame(bytes, BytesReference.toBytes(pbr)); } else { - assertNotSame(bytes, pbr.toBytes()); + assertNotSame(bytes, BytesReference.toBytes(pbr)); } } } - public void testHasArray() throws IOException { + public void testHasSinglePage() throws IOException { int length = randomIntBetween(10, PAGE_SIZE * randomIntBetween(1, 3)); BytesReference pbr = newBytesReference(length); // must return true for <= pagesize - assertEquals(length <= PAGE_SIZE, pbr.hasArray()); + assertEquals(length <= PAGE_SIZE, getNumPages(pbr) == 1); + } + + public void testEquals() { + int length = randomIntBetween(100, PAGE_SIZE * randomIntBetween(2, 5)); + ByteArray ba1 = bigarrays.newByteArray(length, false); + ByteArray ba2 = bigarrays.newByteArray(length, false); + + // copy contents + for (long i = 0; i < length; i++) { + ba2.set(i, ba1.get(i)); + } + + // get refs & compare + BytesReference pbr = new PagedBytesReference(bigarrays, ba1, length); + BytesReference pbr2 = new PagedBytesReference(bigarrays, ba2, length); + assertEquals(pbr, pbr2); + int offsetToFlip = randomIntBetween(0, length - 1); + int value = ~Byte.toUnsignedInt(ba1.get(offsetToFlip)); + ba2.set(offsetToFlip, (byte)value); + assertNotEquals(pbr, pbr2); } } diff --git a/core/src/test/java/org/elasticsearch/common/compress/DeflateCompressedXContentTests.java b/core/src/test/java/org/elasticsearch/common/compress/DeflateCompressedXContentTests.java index 72866d082ae..0ce95077965 100644 --- a/core/src/test/java/org/elasticsearch/common/compress/DeflateCompressedXContentTests.java +++ b/core/src/test/java/org/elasticsearch/common/compress/DeflateCompressedXContentTests.java @@ -91,8 +91,8 @@ public class DeflateCompressedXContentTests extends ESTestCase { // of different size are being used assertFalse(b1.equals(b2)); // we used the compressed representation directly and did not recompress - assertArrayEquals(b1.toBytes(), new CompressedXContent(b1).compressed()); - assertArrayEquals(b2.toBytes(), new CompressedXContent(b2).compressed()); + assertArrayEquals(BytesReference.toBytes(b1), new CompressedXContent(b1).compressed()); + assertArrayEquals(BytesReference.toBytes(b2), new CompressedXContent(b2).compressed()); // but compressedstring instances are still equal assertEquals(new CompressedXContent(b1), new CompressedXContent(b2)); } diff --git a/core/src/test/java/org/elasticsearch/common/geo/GeoDistanceTests.java b/core/src/test/java/org/elasticsearch/common/geo/GeoDistanceTests.java index 407c9790dbe..416299f8e7e 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/GeoDistanceTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/GeoDistanceTests.java @@ -46,7 +46,7 @@ public class GeoDistanceTests extends ESTestCase { GeoDistance geoDistance = randomFrom(GeoDistance.PLANE, GeoDistance.FACTOR, GeoDistance.ARC, GeoDistance.SLOPPY_ARC) ; try (BytesStreamOutput out = new BytesStreamOutput()) { geoDistance.writeTo(out); - try (StreamInput in = StreamInput.wrap(out.bytes())) {; + try (StreamInput in = out.bytes().streamInput()) { GeoDistance copy = GeoDistance.readFromStream(in); assertEquals(copy.toString() + " vs. 
" + geoDistance.toString(), copy, geoDistance); } @@ -60,7 +60,7 @@ public class GeoDistanceTests extends ESTestCase { } else { out.writeVInt(randomIntBetween(Integer.MIN_VALUE, -1)); } - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { GeoDistance.readFromStream(in); } catch (IOException e) { assertThat(e.getMessage(), containsString("Unknown GeoDistance ordinal [")); diff --git a/core/src/test/java/org/elasticsearch/common/geo/GeoHashTests.java b/core/src/test/java/org/elasticsearch/common/geo/GeoHashTests.java index 413062e4a2d..d2ae8401c55 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/GeoHashTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/GeoHashTests.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.common.geo; +import org.apache.lucene.geo.Rectangle; import org.elasticsearch.test.ESTestCase; /** @@ -57,4 +58,16 @@ public class GeoHashTests extends ESTestCase { } } } + + public void testBboxFromHash() { + String hash = randomGeohash(1, 12); + int level = hash.length(); + Rectangle bbox = GeoHashUtils.bbox(hash); + // check that the length is as expected + double expectedLonDiff = 360.0 / (Math.pow(8.0, (level + 1) / 2) * Math.pow(4.0, level / 2)); + double expectedLatDiff = 180.0 / (Math.pow(4.0, (level + 1) / 2) * Math.pow(8.0, level / 2)); + assertEquals(expectedLonDiff, bbox.maxLon - bbox.minLon, 0.00001); + assertEquals(expectedLatDiff, bbox.maxLat - bbox.minLat, 0.00001); + assertEquals(hash, GeoHashUtils.stringEncode(bbox.minLon, bbox.minLat, level)); + } } diff --git a/core/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java b/core/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java index 566d2148cae..76376a4d30d 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java @@ -56,7 +56,7 @@ import static org.elasticsearch.common.geo.builders.ShapeBuilder.SPATIAL_CONTEXT */ public class GeoJSONShapeParserTests extends ESTestCase { - private final static GeometryFactory GEOMETRY_FACTORY = SPATIAL_CONTEXT.getGeometryFactory(); + private static final GeometryFactory GEOMETRY_FACTORY = SPATIAL_CONTEXT.getGeometryFactory(); public void testParse_simplePoint() throws IOException { String pointGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Point") diff --git a/core/src/test/java/org/elasticsearch/common/geo/ShapeRelationTests.java b/core/src/test/java/org/elasticsearch/common/geo/ShapeRelationTests.java index 6ee6a4fff83..e4eaa17874c 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/ShapeRelationTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/ShapeRelationTests.java @@ -39,21 +39,21 @@ public class ShapeRelationTests extends ESTestCase { public void testwriteTo() throws Exception { try (BytesStreamOutput out = new BytesStreamOutput()) { ShapeRelation.INTERSECTS.writeTo(out); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(in.readVInt(), equalTo(0)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { ShapeRelation.DISJOINT.writeTo(out); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(in.readVInt(), equalTo(1)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { ShapeRelation.WITHIN.writeTo(out); - try (StreamInput in = 
StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(in.readVInt(), equalTo(2)); } } @@ -62,19 +62,19 @@ public class ShapeRelationTests extends ESTestCase { public void testReadFrom() throws Exception { try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(0); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(ShapeRelation.readFromStream(in), equalTo(ShapeRelation.INTERSECTS)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(1); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(ShapeRelation.readFromStream(in), equalTo(ShapeRelation.DISJOINT)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(2); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(ShapeRelation.readFromStream(in), equalTo(ShapeRelation.WITHIN)); } } @@ -83,7 +83,7 @@ public class ShapeRelationTests extends ESTestCase { public void testInvalidReadFrom() throws Exception { try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(randomIntBetween(3, Integer.MAX_VALUE)); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { ShapeRelation.readFromStream(in); fail("Expected IOException"); } catch(IOException e) { diff --git a/core/src/test/java/org/elasticsearch/common/geo/SpatialStrategyTests.java b/core/src/test/java/org/elasticsearch/common/geo/SpatialStrategyTests.java index c2f29e6ecd7..b6eae97932f 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/SpatialStrategyTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/SpatialStrategyTests.java @@ -38,14 +38,14 @@ public class SpatialStrategyTests extends ESTestCase { public void testwriteTo() throws Exception { try (BytesStreamOutput out = new BytesStreamOutput()) { SpatialStrategy.TERM.writeTo(out); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(in.readVInt(), equalTo(0)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { SpatialStrategy.RECURSIVE.writeTo(out); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(in.readVInt(), equalTo(1)); } } @@ -54,13 +54,13 @@ public class SpatialStrategyTests extends ESTestCase { public void testReadFrom() throws Exception { try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(0); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(SpatialStrategy.readFromStream(in), equalTo(SpatialStrategy.TERM)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(1); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(SpatialStrategy.readFromStream(in), equalTo(SpatialStrategy.RECURSIVE)); } } @@ -69,7 +69,7 @@ public class SpatialStrategyTests extends ESTestCase { public void testInvalidReadFrom() throws Exception { try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(randomIntBetween(2, Integer.MAX_VALUE)); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { SpatialStrategy.readFromStream(in); fail("Expected IOException"); } catch(IOException e) { 
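A note on the pattern that recurs throughout the hunks above and below: wherever a test writes bytes and reads them back, the patch replaces StreamInput.wrap(out.bytes()) with out.bytes().streamInput(). What follows is a minimal sketch of that write-then-read round trip, using only calls that appear in this patch; the helper name roundTripVInt is ours, for illustration only, and is not part of the change set.

    import java.io.IOException;

    import org.elasticsearch.common.io.stream.BytesStreamOutput;
    import org.elasticsearch.common.io.stream.StreamInput;

    // Write a value, then read it back through the BytesReference's own stream,
    // rather than materializing a byte[] first and wrapping it.
    static int roundTripVInt(int value) throws IOException {
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            out.writeVInt(value);
            try (StreamInput in = out.bytes().streamInput()) {
                return in.readVInt();
            }
        }
    }

The enum serialization tests touched here (ShapeRelation, SpatialStrategy, GeoDistance, Priority) are all instances of this shape: write a VInt ordinal, stream it back in, and assert on the decoded constant.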
diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java b/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java index 9cbd4bb769d..4003a96e26f 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java @@ -137,7 +137,7 @@ public abstract class AbstractShapeBuilderTestCase exte static ShapeBuilder copyShape(ShapeBuilder original) throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { original.writeTo(output); - try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { + try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) { return namedWriteableRegistry.getReader(ShapeBuilder.class, original.getWriteableName()).read(in); } } diff --git a/core/src/test/java/org/elasticsearch/common/io/StreamsTests.java b/core/src/test/java/org/elasticsearch/common/io/StreamsTests.java index 5c6c1e1789b..76b52c08a85 100644 --- a/core/src/test/java/org/elasticsearch/common/io/StreamsTests.java +++ b/core/src/test/java/org/elasticsearch/common/io/StreamsTests.java @@ -84,7 +84,7 @@ public class StreamsTests extends ESTestCase { byte stuff[] = new byte[] { 0, 1, 2, 3 }; BytesRef stuffRef = new BytesRef(stuff, 2, 2); BytesArray stuffArray = new BytesArray(stuffRef); - StreamInput input = StreamInput.wrap(stuffArray); + StreamInput input = stuffArray.streamInput(); assertEquals(2, input.read()); assertEquals(3, input.read()); assertEquals(-1, input.read()); diff --git a/core/src/test/java/org/elasticsearch/common/io/stream/AbstractWriteableEnumTestCase.java b/core/src/test/java/org/elasticsearch/common/io/stream/AbstractWriteableEnumTestCase.java index a4d15173a7c..dc57b0c70d4 100644 --- a/core/src/test/java/org/elasticsearch/common/io/stream/AbstractWriteableEnumTestCase.java +++ b/core/src/test/java/org/elasticsearch/common/io/stream/AbstractWriteableEnumTestCase.java @@ -60,7 +60,7 @@ public abstract class AbstractWriteableEnumTestCase extends ESTestCase { protected static void assertWriteToStream(final Writeable writeableEnum, final int ordinal) throws IOException { try (BytesStreamOutput out = new BytesStreamOutput()) { writeableEnum.writeTo(out); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(in.readVInt(), equalTo(ordinal)); } } @@ -70,7 +70,7 @@ public abstract class AbstractWriteableEnumTestCase extends ESTestCase { protected void assertReadFromStream(final int ordinal, final Writeable expected) throws IOException { try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(ordinal); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(reader.read(in), equalTo(expected)); } } diff --git a/core/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java b/core/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java index 9fcbb708156..dcd612198de 100644 --- a/core/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java +++ b/core/src/test/java/org/elasticsearch/common/io/stream/BytesStreamsTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.common.io.stream; import org.apache.lucene.util.Constants; import org.elasticsearch.common.bytes.BytesArray; 
+import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.util.BigArrays; @@ -29,7 +30,9 @@ import org.joda.time.DateTimeZone; import java.io.IOException; import java.util.ArrayList; +import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.Objects; import static org.hamcrest.Matchers.closeTo; @@ -48,7 +51,7 @@ public class BytesStreamsTests extends ESTestCase { // test empty stream to array assertEquals(0, out.size()); - assertEquals(0, out.bytes().toBytes().length); + assertEquals(0, out.bytes().length()); out.close(); } @@ -63,7 +66,7 @@ public class BytesStreamsTests extends ESTestCase { // write single byte out.writeByte(expectedData[0]); assertEquals(expectedSize, out.size()); - assertArrayEquals(expectedData, out.bytes().toBytes()); + assertArrayEquals(expectedData, BytesReference.toBytes(out.bytes())); out.close(); } @@ -80,7 +83,7 @@ public class BytesStreamsTests extends ESTestCase { } assertEquals(expectedSize, out.size()); - assertArrayEquals(expectedData, out.bytes().toBytes()); + assertArrayEquals(expectedData, BytesReference.toBytes(out.bytes())); out.close(); } @@ -108,14 +111,14 @@ public class BytesStreamsTests extends ESTestCase { byte[] expectedData = randomizedByteArrayWithSize(expectedSize); out.writeBytes(expectedData); assertEquals(expectedSize, out.size()); - assertArrayEquals(expectedData, out.bytes().toBytes()); + assertArrayEquals(expectedData, BytesReference.toBytes(out.bytes())); // bulk-write again with actual bytes expectedSize = 10; expectedData = randomizedByteArrayWithSize(expectedSize); out.writeBytes(expectedData); assertEquals(expectedSize, out.size()); - assertArrayEquals(expectedData, out.bytes().toBytes()); + assertArrayEquals(expectedData, BytesReference.toBytes(out.bytes())); out.close(); } @@ -130,7 +133,7 @@ public class BytesStreamsTests extends ESTestCase { out.writeBytes(expectedData); assertEquals(expectedSize, out.size()); - assertArrayEquals(expectedData, out.bytes().toBytes()); + assertArrayEquals(expectedData, BytesReference.toBytes(out.bytes())); out.close(); } @@ -149,7 +152,7 @@ public class BytesStreamsTests extends ESTestCase { // now write the rest - more than fits into the remaining first page out.writeBytes(expectedData, initialOffset, additionalLength); assertEquals(expectedData.length, out.size()); - assertArrayEquals(expectedData, out.bytes().toBytes()); + assertArrayEquals(expectedData, BytesReference.toBytes(out.bytes())); out.close(); } @@ -168,7 +171,7 @@ public class BytesStreamsTests extends ESTestCase { // ie. 
we cross over into a third out.writeBytes(expectedData, initialOffset, additionalLength); assertEquals(expectedData.length, out.size()); - assertArrayEquals(expectedData, out.bytes().toBytes()); + assertArrayEquals(expectedData, BytesReference.toBytes(out.bytes())); out.close(); } @@ -185,7 +188,7 @@ public class BytesStreamsTests extends ESTestCase { } assertEquals(expectedSize, out.size()); - assertArrayEquals(expectedData, out.bytes().toBytes()); + assertArrayEquals(expectedData, BytesReference.toBytes(out.bytes())); out.close(); } @@ -202,7 +205,7 @@ public class BytesStreamsTests extends ESTestCase { } assertEquals(expectedSize, out.size()); - assertArrayEquals(expectedData, out.bytes().toBytes()); + assertArrayEquals(expectedData, BytesReference.toBytes(out.bytes())); out.close(); } @@ -219,7 +222,7 @@ public class BytesStreamsTests extends ESTestCase { } assertEquals(expectedSize, out.size()); - assertArrayEquals(expectedData, out.bytes().toBytes()); + assertArrayEquals(expectedData, BytesReference.toBytes(out.bytes())); out.close(); } @@ -235,7 +238,7 @@ public class BytesStreamsTests extends ESTestCase { out.seek(position += BigArrays.BYTE_PAGE_SIZE + 10); out.seek(position += BigArrays.BYTE_PAGE_SIZE * 2); assertEquals(position, out.position()); - assertEquals(position, out.bytes().toBytes().length); + assertEquals(position, BytesReference.toBytes(out.bytes()).length); out.close(); } @@ -263,6 +266,7 @@ public class BytesStreamsTests extends ESTestCase { out.writeVInt(2); out.writeLong(-3); out.writeVLong(4); + out.writeOptionalLong(11234234L); out.writeFloat(1.1f); out.writeDouble(2.2); int[] intArray = {1, 2, 3}; @@ -288,16 +292,17 @@ public class BytesStreamsTests extends ESTestCase { out.writeTimeZone(DateTimeZone.forID("CET")); out.writeOptionalTimeZone(DateTimeZone.getDefault()); out.writeOptionalTimeZone(null); - final byte[] bytes = out.bytes().toBytes(); - StreamInput in = StreamInput.wrap(out.bytes().toBytes()); + final byte[] bytes = BytesReference.toBytes(out.bytes()); + StreamInput in = StreamInput.wrap(BytesReference.toBytes(out.bytes())); assertEquals(in.available(), bytes.length); assertThat(in.readBoolean(), equalTo(false)); assertThat(in.readByte(), equalTo((byte)1)); assertThat(in.readShort(), equalTo((short)-1)); assertThat(in.readInt(), equalTo(-1)); assertThat(in.readVInt(), equalTo(2)); - assertThat(in.readLong(), equalTo((long)-3)); - assertThat(in.readVLong(), equalTo((long)4)); + assertThat(in.readLong(), equalTo(-3L)); + assertThat(in.readVLong(), equalTo(4L)); + assertThat(in.readOptionalLong(), equalTo(11234234L)); assertThat((double)in.readFloat(), closeTo(1.1, 0.0001)); assertThat(in.readDouble(), closeTo(2.2, 0.0001)); assertThat(in.readGenericValue(), equalTo((Object) intArray)); @@ -328,12 +333,30 @@ public class BytesStreamsTests extends ESTestCase { namedWriteableRegistry.register(BaseNamedWriteable.class, TestNamedWriteable.NAME, TestNamedWriteable::new); TestNamedWriteable namedWriteableIn = new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10)); out.writeNamedWriteable(namedWriteableIn); - byte[] bytes = out.bytes().toBytes(); + byte[] bytes = BytesReference.toBytes(out.bytes()); StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes), namedWriteableRegistry); assertEquals(in.available(), bytes.length); BaseNamedWriteable namedWriteableOut = in.readNamedWriteable(BaseNamedWriteable.class); - assertEquals(namedWriteableOut, namedWriteableIn); - assertEquals(in.available(), 0); + 
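Two smaller API moves sit in the hunk above: turning a BytesReference into a raw byte[] now goes through the static BytesReference.toBytes(...) instead of an instance toBytes() method, and the stream grows an optional-long round trip. A short sketch of both, assuming writeOptionalLong accepts null like the other writeOptional* helpers:

    import org.elasticsearch.common.bytes.BytesReference;
    import org.elasticsearch.common.io.stream.BytesStreamOutput;
    import org.elasticsearch.common.io.stream.StreamInput;

    public class OptionalLongSketch {
        public static void main(String[] args) throws Exception {
            BytesStreamOutput out = new BytesStreamOutput();
            out.writeOptionalLong(11234234L); // present value
            out.writeOptionalLong(null);      // absent value, recorded via a presence flag (assumed)

            // static helper instead of out.bytes().toBytes()
            byte[] raw = BytesReference.toBytes(out.bytes());
            StreamInput in = StreamInput.wrap(raw); // wrapping a plain byte[] is unchanged
            assert Long.valueOf(11234234L).equals(in.readOptionalLong());
            assert in.readOptionalLong() == null;
            assert in.available() == 0;
        }
    }
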
assertEquals(namedWriteableIn, namedWriteableOut); + assertEquals(0, in.available()); + } + + public void testNamedWriteableList() throws IOException { + NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(); + namedWriteableRegistry.register(BaseNamedWriteable.class, TestNamedWriteable.NAME, TestNamedWriteable::new); + int size = between(0, 100); + List expected = new ArrayList<>(size); + for (int i = 0; i < size; i++) { + expected.add(new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10))); + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeNamedWriteableList(expected); + try (StreamInput in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), namedWriteableRegistry)) { + assertEquals(expected, in.readNamedWriteableList(BaseNamedWriteable.class)); + assertEquals(0, in.available()); + } + } } public void testNamedWriteableDuplicates() throws IOException { @@ -348,7 +371,7 @@ public class BytesStreamsTests extends ESTestCase { public void testNamedWriteableUnknownCategory() throws IOException { BytesStreamOutput out = new BytesStreamOutput(); out.writeNamedWriteable(new TestNamedWriteable("test1", "test2")); - StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(out.bytes().toBytes()), new NamedWriteableRegistry()); + StreamInput in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), new NamedWriteableRegistry()); //no named writeable registered with given name, can write but cannot read it back IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> in.readNamedWriteable(BaseNamedWriteable.class)); assertThat(e.getMessage(), equalTo("unknown named writeable category [" + BaseNamedWriteable.class.getName() + "]")); @@ -368,7 +391,7 @@ public class BytesStreamsTests extends ESTestCase { public void writeTo(StreamOutput out) throws IOException { } }); - StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(out.bytes().toBytes()), namedWriteableRegistry); + StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(BytesReference.toBytes(out.bytes())), namedWriteableRegistry); try { //no named writeable registered with given name under test category, can write but cannot read it back in.readNamedWriteable(BaseNamedWriteable.class); @@ -382,7 +405,7 @@ public class BytesStreamsTests extends ESTestCase { BytesStreamOutput out = new BytesStreamOutput(); TestNamedWriteable testNamedWriteable = new TestNamedWriteable("test1", "test2"); out.writeNamedWriteable(testNamedWriteable); - StreamInput in = StreamInput.wrap(out.bytes().toBytes()); + StreamInput in = StreamInput.wrap(BytesReference.toBytes(out.bytes())); try { in.readNamedWriteable(BaseNamedWriteable.class); fail("Expected UnsupportedOperationException"); @@ -397,7 +420,7 @@ public class BytesStreamsTests extends ESTestCase { namedWriteableRegistry.register(BaseNamedWriteable.class, TestNamedWriteable.NAME, (StreamInput in) -> null); TestNamedWriteable namedWriteableIn = new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10)); out.writeNamedWriteable(namedWriteableIn); - byte[] bytes = out.bytes().toBytes(); + byte[] bytes = BytesReference.toBytes(out.bytes()); StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes), namedWriteableRegistry); assertEquals(in.available(), bytes.length); IOException e = expectThrows(IOException.class, () -> in.readNamedWriteable(BaseNamedWriteable.class)); @@ -407,7 +430,7 @@ public 
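The new testNamedWriteableList pins down a list round trip in which each element is written as its registered name plus its payload, so the reading side can look up the right reader. A condensed sketch of the same flow; Message is a hypothetical one-field writeable invented for illustration, while the registry and stream calls mirror the test above:

    import java.io.IOException;
    import java.util.Arrays;
    import java.util.List;

    import org.elasticsearch.common.io.stream.BytesStreamOutput;
    import org.elasticsearch.common.io.stream.NamedWriteable;
    import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
    import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
    import org.elasticsearch.common.io.stream.StreamInput;
    import org.elasticsearch.common.io.stream.StreamOutput;

    public class NamedWriteableListSketch {
        // hypothetical NamedWriteable with a single string payload
        static class Message implements NamedWriteable {
            static final String NAME = "message";
            final String text;
            Message(String text) { this.text = text; }
            Message(StreamInput in) throws IOException { this.text = in.readString(); }
            @Override public String getWriteableName() { return NAME; }
            @Override public void writeTo(StreamOut out) throws IOException { out.writeString(text); }
            @Override public boolean equals(Object o) { return o instanceof Message && ((Message) o).text.equals(text); }
            @Override public int hashCode() { return text.hashCode(); }
        }

        public static void main(String[] args) throws IOException {
            NamedWriteableRegistry registry = new NamedWriteableRegistry();
            registry.register(Message.class, Message.NAME, Message::new);

            List<Message> expected = Arrays.asList(new Message("a"), new Message("b"));
            try (BytesStreamOutput out = new BytesStreamOutput()) {
                out.writeNamedWriteableList(expected); // writes name + payload per element
                try (StreamInput in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), registry)) {
                    assert expected.equals(in.readNamedWriteableList(Message.class));
                }
            }
        }
    }

(The writeTo parameter type is of course StreamOutput; the reader lookup is keyed on the category class and the name, exactly as in testNamedWriteableList.)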
class BytesStreamsTests extends ESTestCase { public void testOptionalWriteableReaderReturnsNull() throws IOException { BytesStreamOutput out = new BytesStreamOutput(); out.writeOptionalWriteable(new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10))); - StreamInput in = StreamInput.wrap(out.bytes().toBytes()); + StreamInput in = StreamInput.wrap(BytesReference.toBytes(out.bytes())); IOException e = expectThrows(IOException.class, () -> in.readOptionalWriteable((StreamInput ignored) -> null)); assertThat(e.getMessage(), endsWith("] returned null which is not allowed and probably means it screwed up the stream.")); } @@ -423,7 +446,7 @@ public class BytesStreamsTests extends ESTestCase { }); TestNamedWriteable namedWriteableIn = new TestNamedWriteable(randomAsciiOfLengthBetween(1, 10), randomAsciiOfLengthBetween(1, 10)); out.writeNamedWriteable(namedWriteableIn); - byte[] bytes = out.bytes().toBytes(); + byte[] bytes = BytesReference.toBytes(out.bytes()); StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(bytes), namedWriteableRegistry); assertEquals(in.available(), bytes.length); AssertionError e = expectThrows(AssertionError.class, () -> in.readNamedWriteable(BaseNamedWriteable.class)); @@ -442,9 +465,9 @@ public class BytesStreamsTests extends ESTestCase { final BytesStreamOutput out = new BytesStreamOutput(); out.writeStreamableList(expected); - final StreamInput in = StreamInput.wrap(out.bytes().toBytes()); + final StreamInput in = StreamInput.wrap(BytesReference.toBytes(out.bytes())); - List loaded = in.readStreamableList(TestStreamable::new); + final List loaded = in.readStreamableList(TestStreamable::new); assertThat(loaded, hasSize(expected.size())); @@ -458,7 +481,49 @@ public class BytesStreamsTests extends ESTestCase { out.close(); } - private static abstract class BaseNamedWriteable implements NamedWriteable { + public void testWriteMapOfLists() throws IOException { + final int size = randomIntBetween(0, 5); + final Map> expected = new HashMap<>(size); + + for (int i = 0; i < size; ++i) { + int listSize = randomIntBetween(0, 5); + List list = new ArrayList<>(listSize); + + for (int j = 0; j < listSize; ++j) { + list.add(randomAsciiOfLength(5)); + } + + expected.put(randomAsciiOfLength(2), list); + } + + final BytesStreamOutput out = new BytesStreamOutput(); + out.writeMapOfLists(expected); + + final StreamInput in = StreamInput.wrap(BytesReference.toBytes(out.bytes())); + + final Map> loaded = in.readMapOfLists(); + + assertThat(loaded.size(), equalTo(expected.size())); + + for (Map.Entry> entry : expected.entrySet()) { + assertThat(loaded.containsKey(entry.getKey()), equalTo(true)); + + List loadedList = loaded.get(entry.getKey()); + + assertThat(loadedList, hasSize(entry.getValue().size())); + + for (int i = 0; i < loadedList.size(); ++i) { + assertEquals(entry.getValue().get(i), loadedList.get(i)); + } + } + + assertEquals(0, in.available()); + + in.close(); + out.close(); + } + + private abstract static class BaseNamedWriteable implements NamedWriteable { } @@ -537,7 +602,7 @@ public class BytesStreamsTests extends ESTestCase { // toByteArray() must fail try { - out.bytes().toBytes(); + BytesReference.toBytes(out.bytes()); fail("expected IllegalStateException: stream closed"); } catch (IllegalStateException iex1) { @@ -558,7 +623,7 @@ public class BytesStreamsTests extends ESTestCase { BytesStreamOutput out = new BytesStreamOutput(); GeoPoint geoPoint = new GeoPoint(randomDouble(), randomDouble()); 
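testWriteMapOfLists, added above, covers the new Map-of-string-lists helpers. A trimmed round trip with fixed contents in place of the test's randomized ones:

    import java.util.Collections;
    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    import org.elasticsearch.common.io.stream.BytesStreamOutput;
    import org.elasticsearch.common.io.stream.StreamInput;

    public class MapOfListsSketch {
        public static void main(String[] args) throws Exception {
            Map<String, List<String>> expected = new HashMap<>();
            expected.put("ab", Arrays.asList("one", "two"));
            expected.put("cd", Collections.emptyList()); // empty lists survive the round trip too

            try (BytesStreamOutput out = new BytesStreamOutput()) {
                out.writeMapOfLists(expected);
                try (StreamInput in = out.bytes().streamInput()) {
                    Map<String, List<String>> loaded = in.readMapOfLists();
                    assert expected.equals(loaded);
                    assert in.available() == 0;
                }
            }
        }
    }
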
out.writeGenericValue(geoPoint); - StreamInput wrap = StreamInput.wrap(out.bytes()); + StreamInput wrap = out.bytes().streamInput(); GeoPoint point = (GeoPoint) wrap.readGenericValue(); assertEquals(point, geoPoint); } @@ -566,7 +631,7 @@ public class BytesStreamsTests extends ESTestCase { BytesStreamOutput out = new BytesStreamOutput(); GeoPoint geoPoint = new GeoPoint(randomDouble(), randomDouble()); out.writeGeoPoint(geoPoint); - StreamInput wrap = StreamInput.wrap(out.bytes()); + StreamInput wrap = out.bytes().streamInput(); GeoPoint point = wrap.readGeoPoint(); assertEquals(point, geoPoint); } diff --git a/core/src/test/java/org/elasticsearch/common/io/stream/StreamTests.java b/core/src/test/java/org/elasticsearch/common/io/stream/StreamTests.java index aa6016774b0..06d39398c8e 100644 --- a/core/src/test/java/org/elasticsearch/common/io/stream/StreamTests.java +++ b/core/src/test/java/org/elasticsearch/common/io/stream/StreamTests.java @@ -19,13 +19,13 @@ package org.elasticsearch.common.io.stream; -import org.elasticsearch.common.bytes.ByteBufferBytesReference; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.test.ESTestCase; import java.io.ByteArrayInputStream; import java.io.IOException; -import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -61,8 +61,8 @@ public class StreamTests extends ESTestCase { for (Tuple value : values) { BytesStreamOutput out = new BytesStreamOutput(); out.writeZLong(value.v1()); - assertArrayEquals(Long.toString(value.v1()), value.v2(), out.bytes().toBytes()); - ByteBufferBytesReference bytes = new ByteBufferBytesReference(ByteBuffer.wrap(value.v2())); + assertArrayEquals(Long.toString(value.v1()), value.v2(), BytesReference.toBytes(out.bytes())); + BytesReference bytes = new BytesArray(value.v2()); assertEquals(Arrays.toString(value.v2()), (long)value.v1(), bytes.streamInput().readZLong()); } } @@ -143,7 +143,7 @@ public class StreamTests extends ESTestCase { assertThat(targetArray, equalTo(sourceArray)); } - final static class WriteableString implements Writeable { + static final class WriteableString implements Writeable { final String string; public WriteableString(String string) { diff --git a/core/src/test/java/org/elasticsearch/common/logging/DeprecationLoggerTests.java b/core/src/test/java/org/elasticsearch/common/logging/DeprecationLoggerTests.java new file mode 100644 index 00000000000..f75e73ced2c --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/logging/DeprecationLoggerTests.java @@ -0,0 +1,155 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
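For context on the ZLong assertions in the StreamTests hunk above: writeZLong zig-zag-encodes the value (so small negative longs stay small) and then emits it as a varint. A standalone sketch of that encoding, written from the general zig-zag/varint scheme rather than copied from StreamOutput, so treat it as an approximation of the wire format:

    import java.io.ByteArrayOutputStream;

    public class ZigZagSketch {
        // zig-zag maps ..., -2, -1, 0, 1, 2, ... to 3, 1, 0, 2, 4, ... so the varint stays short
        static byte[] encodeZLong(long value) {
            long zigzag = (value << 1) ^ (value >> 63);
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            while ((zigzag & ~0x7FL) != 0) {
                out.write((byte) ((zigzag & 0x7F) | 0x80)); // low 7 bits, continuation bit set
                zigzag >>>= 7;
            }
            out.write((byte) zigzag);
            return out.toByteArray();
        }

        public static void main(String[] args) {
            assert encodeZLong(0L).length == 1;
            assert encodeZLong(-1L).length == 1;  // zig-zag turns -1 into 1, a single byte
            assert encodeZLong(Long.MAX_VALUE).length == 10;
        }
    }
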
+ */ +package org.elasticsearch.common.logging; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasSize; + +/** + * Tests {@link DeprecationLogger} + */ +public class DeprecationLoggerTests extends ESTestCase { + + private final DeprecationLogger logger = new DeprecationLogger(Loggers.getLogger(getClass())); + + public void testAddsHeaderWithThreadContext() throws IOException { + String msg = "A simple message [{}]"; + String param = randomAsciiOfLengthBetween(1, 5); + String formatted = LoggerMessageFormat.format(msg, (Object)param); + + try (ThreadContext threadContext = new ThreadContext(Settings.EMPTY)) { + Set<ThreadContext> threadContexts = Collections.singleton(threadContext); + + logger.deprecated(threadContexts, msg, param); + + Map<String, List<String>> responseHeaders = threadContext.getResponseHeaders(); + + assertEquals(1, responseHeaders.size()); + assertEquals(formatted, responseHeaders.get(DeprecationLogger.DEPRECATION_HEADER).get(0)); + } + } + + public void testAddsCombinedHeaderWithThreadContext() throws IOException { + String msg = "A simple message [{}]"; + String param = randomAsciiOfLengthBetween(1, 5); + String formatted = LoggerMessageFormat.format(msg, (Object)param); + String formatted2 = randomAsciiOfLengthBetween(1, 10); + + try (ThreadContext threadContext = new ThreadContext(Settings.EMPTY)) { + Set<ThreadContext> threadContexts = Collections.singleton(threadContext); + + logger.deprecated(threadContexts, msg, param); + logger.deprecated(threadContexts, formatted2); + + Map<String, List<String>> responseHeaders = threadContext.getResponseHeaders(); + + assertEquals(1, responseHeaders.size()); + + List<String> responses = responseHeaders.get(DeprecationLogger.DEPRECATION_HEADER); + + assertEquals(2, responses.size()); + assertEquals(formatted, responses.get(0)); + assertEquals(formatted2, responses.get(1)); + } + } + + public void testCanRemoveThreadContext() throws IOException { + final String expected = "testCanRemoveThreadContext"; + final String unexpected = "testCannotRemoveThreadContext"; + + try (ThreadContext threadContext = new ThreadContext(Settings.EMPTY)) { + // NOTE: by adding it to the logger, we allow any concurrent test to write to it (from their own threads) + DeprecationLogger.setThreadContext(threadContext); + + logger.deprecated(expected); + + Map<String, List<String>> responseHeaders = threadContext.getResponseHeaders(); + List<String> responses = responseHeaders.get(DeprecationLogger.DEPRECATION_HEADER); + + // ensure it works (note: concurrent tests may be adding to it, but in different threads, so it should have no impact) + assertThat(responses, hasSize(atLeast(1))); + assertThat(responses, hasItem(equalTo(expected))); + + DeprecationLogger.removeThreadContext(threadContext); + + logger.deprecated(unexpected); + + responseHeaders = threadContext.getResponseHeaders(); + responses = responseHeaders.get(DeprecationLogger.DEPRECATION_HEADER); + + assertThat(responses, hasSize(atLeast(1))); + assertThat(responses, hasItem(expected)); + assertThat(responses, not(hasItem(unexpected))); + } + } + + public void testIgnoresClosedThreadContext() throws IOException { + ThreadContext threadContext = new
ThreadContext(Settings.EMPTY); + Set threadContexts = new HashSet<>(1); + + threadContexts.add(threadContext); + + threadContext.close(); + + logger.deprecated(threadContexts, "Ignored logger message"); + + assertTrue(threadContexts.contains(threadContext)); + } + + public void testSafeWithoutThreadContext() { + logger.deprecated(Collections.emptySet(), "Ignored"); + } + + public void testFailsWithoutThreadContextSet() { + expectThrows(NullPointerException.class, () -> logger.deprecated((Set)null, "Does not explode")); + } + + public void testFailsWhenDoubleSettingSameThreadContext() throws IOException { + try (ThreadContext threadContext = new ThreadContext(Settings.EMPTY)) { + DeprecationLogger.setThreadContext(threadContext); + + try { + expectThrows(IllegalStateException.class, () -> DeprecationLogger.setThreadContext(threadContext)); + } finally { + // cleanup after ourselves + DeprecationLogger.removeThreadContext(threadContext); + } + } + } + + public void testFailsWhenRemovingUnknownThreadContext() throws IOException { + try (ThreadContext threadContext = new ThreadContext(Settings.EMPTY)) { + expectThrows(IllegalStateException.class, () -> DeprecationLogger.removeThreadContext(threadContext)); + } + } + +} diff --git a/core/src/test/java/org/elasticsearch/common/logging/LoggingConfigurationTests.java b/core/src/test/java/org/elasticsearch/common/logging/LoggingConfigurationTests.java index fabace237b2..581a9599365 100644 --- a/core/src/test/java/org/elasticsearch/common/logging/LoggingConfigurationTests.java +++ b/core/src/test/java/org/elasticsearch/common/logging/LoggingConfigurationTests.java @@ -95,23 +95,6 @@ public class LoggingConfigurationTests extends ESTestCase { assertThat(logSettings.get("json"), is("foo")); } - public void testResolvePropertiesLoggingConfig() throws Exception { - Path tmpDir = createTempDir(); - Path loggingConf = tmpDir.resolve(loggingConfiguration("properties")); - Files.write(loggingConf, "key: value".getBytes(StandardCharsets.UTF_8)); - Environment environment = new Environment( - Settings.builder() - .put(Environment.PATH_CONF_SETTING.getKey(), tmpDir.toAbsolutePath()) - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .build()); - - Settings.Builder builder = Settings.builder(); - LogConfigurator.resolveConfig(environment, builder); - - Settings logSettings = builder.build(); - assertThat(logSettings.get("key"), is("value")); - } - public void testResolveYamlLoggingConfig() throws Exception { Path tmpDir = createTempDir(); Path loggingConf1 = tmpDir.resolve(loggingConfiguration("yml")); diff --git a/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java b/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java index 749ffa3c9d9..f5a5928c980 100644 --- a/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java +++ b/core/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java @@ -20,7 +20,7 @@ package org.elasticsearch.common.network; import org.elasticsearch.action.support.replication.ReplicationTask; -import org.elasticsearch.client.Client; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Table; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.ModuleTestCase; @@ -59,7 +59,7 @@ public class NetworkModuleTests extends ModuleTestCase { } } - static class FakeHttpTransport extends AbstractLifecycleComponent implements HttpServerTransport { + static class 
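The DeprecationLoggerTests above establish that a deprecation message is not only logged but also recorded as a response header on each registered ThreadContext. A minimal sketch of that flow; it sits in the org.elasticsearch.common.logging package on the assumption that, as in the tests, the Set-taking deprecated(...) overload and DEPRECATION_HEADER are package-visible:

    package org.elasticsearch.common.logging;

    import java.util.Collections;
    import java.util.List;
    import java.util.Map;

    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.common.util.concurrent.ThreadContext;

    public class DeprecationHeaderSketch {
        public static void main(String[] args) throws Exception {
            DeprecationLogger logger = new DeprecationLogger(Loggers.getLogger(DeprecationHeaderSketch.class));
            try (ThreadContext threadContext = new ThreadContext(Settings.EMPTY)) {
                // test-style overload with an explicit context set; production code registers
                // contexts via DeprecationLogger.setThreadContext(...) instead
                logger.deprecated(Collections.singleton(threadContext), "[{}] is deprecated", "some_setting");

                Map<String, List<String>> headers = threadContext.getResponseHeaders();
                assert "[some_setting] is deprecated".equals(headers.get(DeprecationLogger.DEPRECATION_HEADER).get(0));
            }
        }
    }
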
FakeHttpTransport extends AbstractLifecycleComponent implements HttpServerTransport { public FakeHttpTransport() { super(null); } @@ -87,18 +87,18 @@ public class NetworkModuleTests extends ModuleTestCase { static class FakeRestHandler extends BaseRestHandler { public FakeRestHandler() { - super(null, null); + super(null); } @Override - protected void handleRequest(RestRequest request, RestChannel channel, Client client) throws Exception {} + public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception {} } static class FakeCatRestHandler extends AbstractCatAction { public FakeCatRestHandler() { - super(null, null, null); + super(null); } @Override - protected void doRequest(RestRequest request, RestChannel channel, Client client) {} + protected void doRequest(RestRequest request, RestChannel channel, NodeClient client) {} @Override protected void documentation(StringBuilder sb) {} @Override @@ -108,7 +108,10 @@ public class NetworkModuleTests extends ModuleTestCase { } public void testRegisterTransportService() { - Settings settings = Settings.builder().put(NetworkModule.TRANSPORT_SERVICE_TYPE_KEY, "custom").build(); + Settings settings = Settings.builder().put(NetworkModule.TRANSPORT_SERVICE_TYPE_KEY, "custom") + .put(NetworkModule.HTTP_ENABLED.getKey(), false) + .put(NetworkModule.TRANSPORT_TYPE_KEY, "local") + .build(); NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false, new NamedWriteableRegistry()); module.registerTransportService("custom", FakeTransportService.class); assertBinding(module, TransportService.class, FakeTransportService.class); @@ -122,7 +125,9 @@ public class NetworkModuleTests extends ModuleTestCase { } public void testRegisterTransport() { - Settings settings = Settings.builder().put(NetworkModule.TRANSPORT_TYPE_KEY, "custom").build(); + Settings settings = Settings.builder().put(NetworkModule.TRANSPORT_TYPE_KEY, "custom") + .put(NetworkModule.HTTP_ENABLED.getKey(), false) + .build(); NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false, new NamedWriteableRegistry()); module.registerTransport("custom", FakeTransport.class); assertBinding(module, Transport.class, FakeTransport.class); @@ -136,7 +141,9 @@ public class NetworkModuleTests extends ModuleTestCase { } public void testRegisterHttpTransport() { - Settings settings = Settings.builder().put(NetworkModule.HTTP_TYPE_SETTING.getKey(), "custom").build(); + Settings settings = Settings.builder() + .put(NetworkModule.HTTP_TYPE_SETTING.getKey(), "custom") + .put(NetworkModule.TRANSPORT_TYPE_KEY, "local").build(); NetworkModule module = new NetworkModule(new NetworkService(settings), settings, false, new NamedWriteableRegistry()); module.registerHttpTransport("custom", FakeHttpTransport.class); assertBinding(module, HttpServerTransport.class, FakeHttpTransport.class); @@ -154,7 +161,8 @@ public class NetworkModuleTests extends ModuleTestCase { } // not added if http is disabled - settings = Settings.builder().put(NetworkModule.HTTP_ENABLED.getKey(), false).build(); + settings = Settings.builder().put(NetworkModule.HTTP_ENABLED.getKey(), false) + .put(NetworkModule.TRANSPORT_TYPE_KEY, "local").build(); module = new NetworkModule(new NetworkService(settings), settings, false, new NamedWriteableRegistry()); assertNotBound(module, HttpServerTransport.class); assertFalse(module.isTransportClient()); diff --git a/core/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java 
b/core/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java index e82d37a5cf5..f9e5f6e3fbb 100644 --- a/core/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java +++ b/core/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.common.rounding; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.rounding.TimeZoneRounding.TimeIntervalRounding; import org.elasticsearch.common.rounding.TimeZoneRounding.TimeUnitRounding; import org.elasticsearch.common.unit.TimeValue; @@ -31,10 +32,13 @@ import org.joda.time.DateTimeConstants; import org.joda.time.DateTimeZone; import org.joda.time.format.ISODateTimeFormat; +import java.util.ArrayList; +import java.util.List; import java.util.concurrent.TimeUnit; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.lessThanOrEqualTo; @@ -328,29 +332,70 @@ public class TimeZoneRoundingTests extends ESTestCase { long interval = unit.toMillis(randomIntBetween(1, 365)); DateTimeZone tz = randomDateTimeZone(); TimeZoneRounding rounding = new TimeZoneRounding.TimeIntervalRounding(interval, tz); - long date = Math.abs(randomLong() % (2 * (long) 10e11)); // 1970-01-01T00:00:00Z - 2033-05-18T05:33:20.000+02:00 - try { - final long roundedDate = rounding.round(date); - final long nextRoundingValue = rounding.nextRoundingValue(roundedDate); - assertThat("Rounding should be idempotent", roundedDate, equalTo(rounding.round(roundedDate))); - assertThat("Rounded value smaller or equal than unrounded", roundedDate, lessThanOrEqualTo(date)); - assertThat("Values smaller than rounded value should round further down", rounding.round(roundedDate - 1), - lessThan(roundedDate)); + long mainDate = Math.abs(randomLong() % (2 * (long) 10e11)); // 1970-01-01T00:00:00Z - 2033-05-18T05:33:20.000+02:00 + if (randomBoolean()) { + mainDate = nastyDate(mainDate, tz, interval); + } + // check two intervals around date + long previousRoundedValue = Long.MIN_VALUE; + for (long date = mainDate - 2 * interval; date < mainDate + 2 * interval; date += interval / 2) { + try { + final long roundedDate = rounding.round(date); + final long nextRoundingValue = rounding.nextRoundingValue(roundedDate); + assertThat("Rounding should be idempotent", roundedDate, equalTo(rounding.round(roundedDate))); + assertThat("Rounded value smaller or equal than unrounded", roundedDate, lessThanOrEqualTo(date)); + assertThat("Values smaller than rounded value should round further down", rounding.round(roundedDate - 1), + lessThan(roundedDate)); + assertThat("Rounding should be >= previous rounding value", roundedDate, greaterThanOrEqualTo(previousRoundedValue)); - if (tz.isFixed()) { - assertThat("NextRounding value should be greater than date", nextRoundingValue, greaterThan(roundedDate)); - assertThat("NextRounding value should be interval from rounded value", nextRoundingValue - roundedDate, - equalTo(interval)); - assertThat("NextRounding value should be a rounded date", nextRoundingValue, - equalTo(rounding.round(nextRoundingValue))); + if (tz.isFixed()) { + assertThat("NextRounding value should be greater than date", nextRoundingValue, greaterThan(roundedDate)); + assertThat("NextRounding value should be interval from rounded value", nextRoundingValue - roundedDate, + equalTo(interval)); + 
assertThat("NextRounding value should be a rounded date", nextRoundingValue, + equalTo(rounding.round(nextRoundingValue))); + } + previousRoundedValue = roundedDate; + } catch (AssertionError e) { + logger.error("Rounding error at {}, timezone {}, interval: {},", new DateTime(date, tz), tz, interval); + throw e; } - } catch (AssertionError e) { - logger.error("Rounding error at {}, timezone {}, interval: {},", new DateTime(date, tz), tz, interval); - throw e; } } } + /** + * Test that rounded values are always greater or equal to last rounded value if date is increasing. + * The example covers an interval around 2011-10-30T02:10:00+01:00, time zone CET, interval: 2700000ms + */ + public void testIntervalRoundingMonotonic_CET() { + long interval = TimeUnit.MINUTES.toMillis(45); + DateTimeZone tz = DateTimeZone.forID("CET"); + TimeZoneRounding rounding = new TimeZoneRounding.TimeIntervalRounding(interval, tz); + List> expectedDates = new ArrayList>(); + // first date is the date to be rounded, second the expected result + expectedDates.add(new Tuple<>("2011-10-30T01:40:00.000+02:00", "2011-10-30T01:30:00.000+02:00")); + expectedDates.add(new Tuple<>("2011-10-30T02:02:30.000+02:00", "2011-10-30T01:30:00.000+02:00")); + expectedDates.add(new Tuple<>("2011-10-30T02:25:00.000+02:00", "2011-10-30T02:15:00.000+02:00")); + expectedDates.add(new Tuple<>("2011-10-30T02:47:30.000+02:00", "2011-10-30T02:15:00.000+02:00")); + expectedDates.add(new Tuple<>("2011-10-30T02:10:00.000+01:00", "2011-10-30T02:15:00.000+02:00")); + expectedDates.add(new Tuple<>("2011-10-30T02:32:30.000+01:00", "2011-10-30T02:15:00.000+01:00")); + expectedDates.add(new Tuple<>("2011-10-30T02:55:00.000+01:00", "2011-10-30T02:15:00.000+01:00")); + expectedDates.add(new Tuple<>("2011-10-30T03:17:30.000+01:00", "2011-10-30T03:00:00.000+01:00")); + + long previousDate = Long.MIN_VALUE; + for (Tuple dates : expectedDates) { + final long roundedDate = rounding.round(time(dates.v1())); + assertThat(roundedDate, isDate(time(dates.v2()), tz)); + assertThat(roundedDate, greaterThanOrEqualTo(previousDate)); + previousDate = roundedDate; + } + // here's what this means for interval widths + assertEquals(TimeUnit.MINUTES.toMillis(45), time("2011-10-30T02:15:00.000+02:00") - time("2011-10-30T01:30:00.000+02:00")); + assertEquals(TimeUnit.MINUTES.toMillis(60), time("2011-10-30T02:15:00.000+01:00") - time("2011-10-30T02:15:00.000+02:00")); + assertEquals(TimeUnit.MINUTES.toMillis(45), time("2011-10-30T03:00:00.000+01:00") - time("2011-10-30T02:15:00.000+01:00")); + } + /** * special test for DST switch from #9491 */ diff --git a/core/src/test/java/org/elasticsearch/common/settings/loader/PropertiesSettingsLoaderTests.java b/core/src/test/java/org/elasticsearch/common/settings/loader/PropertiesSettingsLoaderTests.java deleted file mode 100644 index c13ae7cc68b..00000000000 --- a/core/src/test/java/org/elasticsearch/common/settings/loader/PropertiesSettingsLoaderTests.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.settings.loader; - -import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.test.ESTestCase; -import org.junit.Before; - -import java.io.IOException; -import java.nio.charset.Charset; - -public class PropertiesSettingsLoaderTests extends ESTestCase { - - private PropertiesSettingsLoader loader; - - @Before - public void setUp() throws Exception { - super.setUp(); - loader = new PropertiesSettingsLoader(); - } - - public void testDuplicateKeyFromStringThrowsException() throws IOException { - final ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> loader.load("foo=bar\nfoo=baz")); - assertEquals(e.getMessage(), "duplicate settings key [foo] found, previous value [bar], current value [baz]"); - } - - public void testDuplicateKeysFromBytesThrowsException() throws IOException { - final ElasticsearchParseException e = expectThrows( - ElasticsearchParseException.class, - () -> loader.load("foo=bar\nfoo=baz".getBytes(Charset.defaultCharset())) - ); - assertEquals(e.getMessage(), "duplicate settings key [foo] found, previous value [bar], current value [baz]"); - } - - public void testThatNoDuplicatesPropertiesDoesNotAcceptNullValues() { - final PropertiesSettingsLoader.NoDuplicatesProperties properties = loader.new NoDuplicatesProperties(); - expectThrows(NullPointerException.class, () -> properties.put("key", null)); - } - -} diff --git a/core/src/test/java/org/elasticsearch/common/transport/BoundTransportAddressTests.java b/core/src/test/java/org/elasticsearch/common/transport/BoundTransportAddressTests.java index 45db5a33d21..1a3fa4db137 100644 --- a/core/src/test/java/org/elasticsearch/common/transport/BoundTransportAddressTests.java +++ b/core/src/test/java/org/elasticsearch/common/transport/BoundTransportAddressTests.java @@ -51,7 +51,7 @@ public class BoundTransportAddressTests extends ESTestCase { // serialize BytesStreamOutput streamOutput = new BytesStreamOutput(); transportAddress.writeTo(streamOutput); - StreamInput in = ByteBufferStreamInput.wrap(streamOutput.bytes()); + StreamInput in = streamOutput.bytes().streamInput(); BoundTransportAddress serializedAddress; if (randomBoolean()) { diff --git a/core/src/test/java/org/elasticsearch/common/unit/DistanceUnitTests.java b/core/src/test/java/org/elasticsearch/common/unit/DistanceUnitTests.java index f9a4d3f22af..7c5463baed2 100644 --- a/core/src/test/java/org/elasticsearch/common/unit/DistanceUnitTests.java +++ b/core/src/test/java/org/elasticsearch/common/unit/DistanceUnitTests.java @@ -82,7 +82,7 @@ public class DistanceUnitTests extends ESTestCase { for (DistanceUnit unit : DistanceUnit.values()) { try (BytesStreamOutput out = new BytesStreamOutput()) { unit.writeTo(out); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat("Roundtrip serialisation failed.", DistanceUnit.readFromStream(in), equalTo(unit)); } } diff --git a/core/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java 
b/core/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java index 2b5a7c00e5d..3f6f1848fd8 100644 --- a/core/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java +++ b/core/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java @@ -145,7 +145,7 @@ public class FuzzinessTests extends ESTestCase { private static Fuzziness doSerializeRoundtrip(Fuzziness in) throws IOException { BytesStreamOutput output = new BytesStreamOutput(); in.writeTo(output); - StreamInput streamInput = StreamInput.wrap(output.bytes()); + StreamInput streamInput = output.bytes().streamInput(); return new Fuzziness(streamInput); } } diff --git a/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java b/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java index 78afc9e514f..003d78ce42e 100644 --- a/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java +++ b/core/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java @@ -161,7 +161,7 @@ public class TimeValueTests extends ESTestCase { value.writeTo(out); assertEquals(expectedSize, out.size()); - StreamInput in = StreamInput.wrap(out.bytes()); + StreamInput in = out.bytes().streamInput(); TimeValue inValue = new TimeValue(in); assertThat(inValue, equalTo(value)); diff --git a/core/src/test/java/org/elasticsearch/common/util/CancellableThreadsTests.java b/core/src/test/java/org/elasticsearch/common/util/CancellableThreadsTests.java index a89cb48c37a..729c431d2b2 100644 --- a/core/src/test/java/org/elasticsearch/common/util/CancellableThreadsTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/CancellableThreadsTests.java @@ -132,7 +132,7 @@ public class CancellableThreadsTests extends ESTestCase { public void testCancellableThreads() throws InterruptedException { Thread[] threads = new Thread[randomIntBetween(3, 10)]; final TestPlan[] plans = new TestPlan[threads.length]; - final Throwable[] throwables = new Throwable[threads.length]; + final Exception[] exceptions = new Exception[threads.length]; final boolean[] interrupted = new boolean[threads.length]; final CancellableThreads cancellableThreads = new CancellableThreads(); final CountDownLatch readyForCancel = new CountDownLatch(threads.length); @@ -153,8 +153,8 @@ public class CancellableThreadsTests extends ESTestCase { } else { cancellableThreads.execute(new TestRunnable(plan, readyForCancel)); } - } catch (Throwable t) { - throwables[plan.id] = t; + } catch (Exception e) { + exceptions[plan.id] = e; } if (plan.exceptBeforeCancel || plan.exitBeforeCancel) { // we have to mark we're ready now (actually done). @@ -176,19 +176,19 @@ public class CancellableThreadsTests extends ESTestCase { TestPlan plan = plans[i]; final Class exceptionClass = plan.ioException ? IOCustomException.class : CustomException.class; if (plan.exceptBeforeCancel) { - assertThat(throwables[i], Matchers.instanceOf(exceptionClass)); + assertThat(exceptions[i], Matchers.instanceOf(exceptionClass)); } else if (plan.exitBeforeCancel) { - assertNull(throwables[i]); + assertNull(exceptions[i]); } else { // in all other cases, we expect a cancellation exception. 
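The CancellableThreadsTests change below (repeated in EsExecutorsTests and RefCountedTests) narrows catch (Throwable) to catch (Exception): an Error such as OutOfMemoryError or AssertionError should crash the test rather than be collected as an ordinary failure. In miniature:

    import java.util.List;
    import java.util.concurrent.CopyOnWriteArrayList;

    public class CatchExceptionSketch {
        public static void main(String[] args) {
            List<Exception> failures = new CopyOnWriteArrayList<>(); // was List<Throwable>
            Runnable work = () -> { throw new IllegalStateException("boom"); };
            try {
                work.run();
            } catch (Exception e) { // Errors now propagate instead of being recorded
                failures.add(e);
            }
            assert failures.size() == 1;
        }
    }
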
- assertThat(throwables[i], Matchers.instanceOf(CancellableThreads.ExecutionCancelledException.class)); + assertThat(exceptions[i], Matchers.instanceOf(CancellableThreads.ExecutionCancelledException.class)); if (plan.exceptAfterCancel) { - assertThat(throwables[i].getSuppressed(), + assertThat(exceptions[i].getSuppressed(), Matchers.arrayContaining( Matchers.instanceOf(exceptionClass) )); } else { - assertThat(throwables[i].getSuppressed(), Matchers.emptyArray()); + assertThat(exceptions[i].getSuppressed(), Matchers.emptyArray()); } } assertThat(interrupted[plan.id], Matchers.equalTo(plan.presetInterrupt)); diff --git a/core/src/test/java/org/elasticsearch/common/util/IndexFolderUpgraderTests.java b/core/src/test/java/org/elasticsearch/common/util/IndexFolderUpgraderTests.java index 26d6af1cd5f..5302ba8d55c 100644 --- a/core/src/test/java/org/elasticsearch/common/util/IndexFolderUpgraderTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/IndexFolderUpgraderTests.java @@ -67,7 +67,7 @@ public class IndexFolderUpgraderTests extends ESTestCase { public void testUpgradeCustomDataPath() throws IOException { Path customPath = createTempDir(); final Settings nodeSettings = Settings.builder() - .put(NodeEnvironment.ADD_NODE_ID_TO_CUSTOM_PATH.getKey(), randomBoolean()) + .put(NodeEnvironment.ADD_NODE_LOCK_ID_TO_CUSTOM_PATH.getKey(), randomBoolean()) .put(Environment.PATH_SHARED_DATA_SETTING.getKey(), customPath.toAbsolutePath().toString()).build(); try (NodeEnvironment nodeEnv = newNodeEnvironment(nodeSettings)) { final Index index = new Index(randomAsciiOfLength(10), UUIDs.randomBase64UUID()); @@ -96,7 +96,7 @@ public class IndexFolderUpgraderTests extends ESTestCase { public void testPartialUpgradeCustomDataPath() throws IOException { Path customPath = createTempDir(); final Settings nodeSettings = Settings.builder() - .put(NodeEnvironment.ADD_NODE_ID_TO_CUSTOM_PATH.getKey(), randomBoolean()) + .put(NodeEnvironment.ADD_NODE_LOCK_ID_TO_CUSTOM_PATH.getKey(), randomBoolean()) .put(Environment.PATH_SHARED_DATA_SETTING.getKey(), customPath.toAbsolutePath().toString()).build(); try (NodeEnvironment nodeEnv = newNodeEnvironment(nodeSettings)) { final Index index = new Index(randomAsciiOfLength(10), UUIDs.randomBase64UUID()); @@ -136,7 +136,7 @@ public class IndexFolderUpgraderTests extends ESTestCase { public void testUpgrade() throws IOException { final Settings nodeSettings = Settings.builder() - .put(NodeEnvironment.ADD_NODE_ID_TO_CUSTOM_PATH.getKey(), randomBoolean()).build(); + .put(NodeEnvironment.ADD_NODE_LOCK_ID_TO_CUSTOM_PATH.getKey(), randomBoolean()).build(); try (NodeEnvironment nodeEnv = newNodeEnvironment(nodeSettings)) { final Index index = new Index(randomAsciiOfLength(10), UUIDs.randomBase64UUID()); Settings settings = Settings.builder() @@ -159,7 +159,7 @@ public class IndexFolderUpgraderTests extends ESTestCase { public void testUpgradeIndices() throws IOException { final Settings nodeSettings = Settings.builder() - .put(NodeEnvironment.ADD_NODE_ID_TO_CUSTOM_PATH.getKey(), randomBoolean()).build(); + .put(NodeEnvironment.ADD_NODE_LOCK_ID_TO_CUSTOM_PATH.getKey(), randomBoolean()).build(); try (NodeEnvironment nodeEnv = newNodeEnvironment(nodeSettings)) { Map> indexSettingsMap = new HashMap<>(); for (int i = 0; i < randomIntBetween(2, 5); i++) { @@ -256,7 +256,7 @@ public class IndexFolderUpgraderTests extends ESTestCase { .numberOfReplicas(0) .build(); try (NodeEnvironment nodeEnvironment = newNodeEnvironment()) { - IndexMetaData.FORMAT.write(indexState, 1, 
nodeEnvironment.indexPaths(index)); + IndexMetaData.FORMAT.write(indexState, nodeEnvironment.indexPaths(index)); assertFalse(IndexFolderUpgrader.needsUpgrade(index, index.getUUID())); } } @@ -305,7 +305,7 @@ public class IndexFolderUpgraderTests extends ESTestCase { for (int i = 0; i < nodePaths.length; i++) { oldIndexPaths[i] = nodePaths[i].indicesPath.resolve(indexSettings.getIndex().getName()); } - IndexMetaData.FORMAT.write(indexSettings.getIndexMetaData(), 1, oldIndexPaths); + IndexMetaData.FORMAT.write(indexSettings.getIndexMetaData(), oldIndexPaths); for (int id = 0; id < indexSettings.getNumberOfShards(); id++) { Path oldIndexPath = randomFrom(oldIndexPaths); ShardId shardId = new ShardId(indexSettings.getIndex(), id); @@ -316,7 +316,7 @@ public class IndexFolderUpgraderTests extends ESTestCase { writeShard(shardId, oldIndexPath, numIdxFiles, numTranslogFiles); } ShardStateMetaData state = new ShardStateMetaData(true, indexSettings.getUUID(), AllocationId.newInitializing()); - ShardStateMetaData.FORMAT.write(state, 1, oldIndexPath.resolve(String.valueOf(shardId.getId()))); + ShardStateMetaData.FORMAT.write(state, oldIndexPath.resolve(String.valueOf(shardId.getId()))); } } diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/AbstractLifecycleRunnableTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/AbstractLifecycleRunnableTests.java index 4c2e4700943..02adb783197 100644 --- a/core/src/test/java/org/elasticsearch/common/util/concurrent/AbstractLifecycleRunnableTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/AbstractLifecycleRunnableTests.java @@ -48,7 +48,7 @@ public class AbstractLifecycleRunnableTests extends ESTestCase { AbstractLifecycleRunnable runnable = new AbstractLifecycleRunnable(lifecycle, logger) { @Override - public void onFailure(Throwable t) { + public void onFailure(Exception e) { fail("It should not fail"); } @@ -77,7 +77,7 @@ public class AbstractLifecycleRunnableTests extends ESTestCase { AbstractLifecycleRunnable runnable = new AbstractLifecycleRunnable(lifecycle, logger) { @Override - public void onFailure(Throwable t) { + public void onFailure(Exception e) { fail("It should not fail"); } @@ -106,7 +106,7 @@ public class AbstractLifecycleRunnableTests extends ESTestCase { AbstractLifecycleRunnable runnable = new AbstractLifecycleRunnable(lifecycle, logger) { @Override - public void onFailure(Throwable t) { + public void onFailure(Exception e) { fail("It should not fail"); } @@ -145,7 +145,7 @@ public class AbstractLifecycleRunnableTests extends ESTestCase { AbstractLifecycleRunnable runnable = new AbstractLifecycleRunnable(lifecycle, logger) { @Override - public void onFailure(Throwable t) { + public void onFailure(Exception e) { fail("It should not fail"); } diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/AbstractRunnableTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/AbstractRunnableTests.java index 54491aade6f..2373b30e1b2 100644 --- a/core/src/test/java/org/elasticsearch/common/util/concurrent/AbstractRunnableTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/AbstractRunnableTests.java @@ -37,8 +37,8 @@ public class AbstractRunnableTests extends ESTestCase { AbstractRunnable runnable = new AbstractRunnable() { @Override - public void onFailure(Throwable t) { - fail("It should not fail"); + public void onFailure(Exception e) { + fail(e.toString()); } @Override @@ -57,8 +57,8 @@ public class AbstractRunnableTests 
extends ESTestCase { AbstractRunnable runnable = new AbstractRunnable() { @Override - public void onFailure(Throwable t) { - assertSame(exception, t); + public void onFailure(Exception e) { + assertSame(exception, e); } @Override @@ -76,8 +76,8 @@ public class AbstractRunnableTests extends ESTestCase { AbstractRunnable runnable = new AbstractRunnable() { @Override - public void onFailure(Throwable t) { - fail("It should not fail"); + public void onFailure(Exception e) { + fail(e.toString()); } @Override @@ -91,7 +91,7 @@ public class AbstractRunnableTests extends ESTestCase { afterCallable.call(); } catch (Exception e) { - fail("Unexpected for mock."); + fail(e.toString()); } } }; @@ -111,8 +111,8 @@ public class AbstractRunnableTests extends ESTestCase { AbstractRunnable runnable = new AbstractRunnable() { @Override - public void onFailure(Throwable t) { - assertSame(exception, t); + public void onFailure(Exception e) { + assertSame(exception, e); } @Override @@ -126,7 +126,7 @@ public class AbstractRunnableTests extends ESTestCase { afterCallable.call(); } catch (Exception e) { - fail("Unexpected for mock."); + fail(e.toString()); } } }; @@ -142,14 +142,15 @@ public class AbstractRunnableTests extends ESTestCase { AbstractRunnable runnable = new AbstractRunnable() { @Override - public void onFailure(Throwable t) { - assertSame(exception, t); + public void onFailure(Exception e) { + assertSame(exception, e); try { failureCallable.call(); } - catch (Exception e) { - fail("Unexpected for mock."); + catch (Exception inner) { + inner.addSuppressed(e); + fail(inner.toString()); } } @@ -165,8 +166,8 @@ public class AbstractRunnableTests extends ESTestCase { public void testIsForceExecutuonDefaultsFalse() { AbstractRunnable runnable = new AbstractRunnable() { @Override - public void onFailure(Throwable t) { - fail("Not tested"); + public void onFailure(Exception e) { + fail(e.toString()); } @Override diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java index 57da614e689..72db2911fc0 100644 --- a/core/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java @@ -88,8 +88,8 @@ public class EsExecutorsTests extends ESTestCase { } @Override - public void onFailure(Throwable t) { - throw new AssertionError(t); + public void onFailure(Exception e) { + throw new AssertionError(e); } }); @@ -178,7 +178,7 @@ public class EsExecutorsTests extends ESTestCase { try { barrier.await(); barrier.await(); - } catch (Throwable e) { + } catch (Exception e) { barrier.reset(e); } } @@ -214,7 +214,7 @@ public class EsExecutorsTests extends ESTestCase { try { barrier.await(); barrier.await(); - } catch (Throwable e) { + } catch (Exception e) { barrier.reset(e); } } diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java index df51e6e2e0d..933a46de510 100644 --- a/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java @@ -41,9 +41,6 @@ import java.util.concurrent.atomic.AtomicBoolean; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; -/** - * - */ public class PrioritizedExecutorsTests 
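Besides narrowing the callback to onFailure(Exception), the AbstractRunnableTests hunks above stop failing with canned strings and surface the actual exception via fail(e.toString()). A sketch of the resulting shape, assuming the usual AbstractRunnable contract that run() routes any exception thrown by doRun() into onFailure(...):

    import org.elasticsearch.common.util.concurrent.AbstractRunnable;

    public class RunnableSketch {
        public static void main(String[] args) {
            AbstractRunnable runnable = new AbstractRunnable() {
                @Override
                public void onFailure(Exception e) { // narrowed from Throwable
                    // carry the real cause instead of "It should not fail"
                    throw new AssertionError("unexpected failure: " + e, e);
                }

                @Override
                protected void doRun() throws Exception {
                    System.out.println("ran"); // anything thrown here reaches onFailure
                }
            };
            runnable.run();
        }
    }
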
extends ESTestCase { private final ThreadContext holder = new ThreadContext(Settings.EMPTY); diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/RefCountedTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/RefCountedTests.java index 9338beccb9a..c5d0ec4257e 100644 --- a/core/src/test/java/org/elasticsearch/common/util/concurrent/RefCountedTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/RefCountedTests.java @@ -88,7 +88,7 @@ public class RefCountedTests extends ESTestCase { final MyRefCounted counted = new MyRefCounted(); Thread[] threads = new Thread[randomIntBetween(2, 5)]; final CountDownLatch latch = new CountDownLatch(1); - final CopyOnWriteArrayList exceptions = new CopyOnWriteArrayList<>(); + final CopyOnWriteArrayList exceptions = new CopyOnWriteArrayList<>(); for (int i = 0; i < threads.length; i++) { threads[i] = new Thread() { @Override @@ -103,7 +103,7 @@ public class RefCountedTests extends ESTestCase { counted.decRef(); } } - } catch (Throwable e) { + } catch (Exception e) { exceptions.add(e); } } diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/SuspendableRefContainerTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/SuspendableRefContainerTests.java deleted file mode 100644 index 83db2d4a7c6..00000000000 --- a/core/src/test/java/org/elasticsearch/common/util/concurrent/SuspendableRefContainerTests.java +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.common.util.concurrent; - -import org.elasticsearch.common.lease.Releasable; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.test.ESTestCase; - -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; - -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.nullValue; - -public class SuspendableRefContainerTests extends ESTestCase { - - public void testBasicAcquire() throws InterruptedException { - SuspendableRefContainer refContainer = new SuspendableRefContainer(); - assertThat(refContainer.activeRefs(), equalTo(0)); - - Releasable lock1 = randomLockingMethod(refContainer); - assertThat(refContainer.activeRefs(), equalTo(1)); - Releasable lock2 = randomLockingMethod(refContainer); - assertThat(refContainer.activeRefs(), equalTo(2)); - lock1.close(); - assertThat(refContainer.activeRefs(), equalTo(1)); - lock1.close(); // check idempotence - assertThat(refContainer.activeRefs(), equalTo(1)); - lock2.close(); - assertThat(refContainer.activeRefs(), equalTo(0)); - } - - public void testAcquisitionBlockingBlocksNewAcquisitions() throws InterruptedException { - SuspendableRefContainer refContainer = new SuspendableRefContainer(); - assertThat(refContainer.activeRefs(), equalTo(0)); - - try (Releasable block = refContainer.blockAcquisition()) { - assertThat(refContainer.activeRefs(), equalTo(0)); - assertThat(refContainer.tryAcquire(), nullValue()); - assertThat(refContainer.activeRefs(), equalTo(0)); - } - try (Releasable lock = refContainer.tryAcquire()) { - assertThat(refContainer.activeRefs(), equalTo(1)); - } - - // same with blocking acquire - AtomicBoolean acquired = new AtomicBoolean(); - Thread t = new Thread(() -> { - try (Releasable lock = randomBoolean() ? 
refContainer.acquire() : refContainer.acquireUninterruptibly()) { - acquired.set(true); - assertThat(refContainer.activeRefs(), equalTo(1)); - } catch (InterruptedException e) { - fail("Interrupted"); - } - }); - try (Releasable block = refContainer.blockAcquisition()) { - assertThat(refContainer.activeRefs(), equalTo(0)); - t.start(); - // check that blocking acquire really blocks - assertThat(acquired.get(), equalTo(false)); - assertThat(refContainer.activeRefs(), equalTo(0)); - } - t.join(); - assertThat(acquired.get(), equalTo(true)); - assertThat(refContainer.activeRefs(), equalTo(0)); - } - - public void testAcquisitionBlockingWaitsOnExistingAcquisitions() throws InterruptedException { - SuspendableRefContainer refContainer = new SuspendableRefContainer(); - - AtomicBoolean acquired = new AtomicBoolean(); - Thread t = new Thread(() -> { - try (Releasable block = refContainer.blockAcquisition()) { - acquired.set(true); - assertThat(refContainer.activeRefs(), equalTo(0)); - } - }); - try (Releasable lock = randomLockingMethod(refContainer)) { - assertThat(refContainer.activeRefs(), equalTo(1)); - t.start(); - assertThat(acquired.get(), equalTo(false)); - assertThat(refContainer.activeRefs(), equalTo(1)); - } - t.join(); - assertThat(acquired.get(), equalTo(true)); - assertThat(refContainer.activeRefs(), equalTo(0)); - } - - private Releasable randomLockingMethod(SuspendableRefContainer refContainer) throws InterruptedException { - switch (randomInt(2)) { - case 0: return refContainer.tryAcquire(); - case 1: return refContainer.acquire(); - case 2: return refContainer.acquireUninterruptibly(); - } - throw new IllegalArgumentException("randomLockingMethod inconsistent"); - } -} diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java index 1a582d48f6b..d402f09f07d 100644 --- a/core/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java @@ -19,14 +19,18 @@ package org.elasticsearch.common.util.concurrent; import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.util.Collections; import java.util.HashMap; +import java.util.List; +import java.util.Map; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.sameInstance; public class ThreadContextTests extends ESTestCase { @@ -35,7 +39,7 @@ public class ThreadContextTests extends ESTestCase { Settings build = Settings.builder().put("request.headers.default", "1").build(); ThreadContext threadContext = new ThreadContext(build); threadContext.putHeader("foo", "bar"); - threadContext.putTransient("ctx.foo", new Integer(1)); + threadContext.putTransient("ctx.foo", 1); assertEquals("bar", threadContext.getHeader("foo")); assertEquals(new Integer(1), threadContext.getTransient("ctx.foo")); assertEquals("1", threadContext.getHeader("default")); @@ -46,7 +50,7 @@ public class ThreadContextTests extends ESTestCase { } assertEquals("bar", threadContext.getHeader("foo")); - assertEquals(new Integer(1), threadContext.getTransient("ctx.foo")); + assertEquals(Integer.valueOf(1), 
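A small modernization recurs in the ThreadContextTests hunks here: new Integer(1) becomes autoboxing on the write side and Integer.valueOf(1) in the assertions. The constructor always allocates, while valueOf (which autoboxing compiles to) reuses cached instances for small values; for assertEquals the two are interchangeable, since comparison is by value either way:

    public class BoxingSketch {
        public static void main(String[] args) {
            Integer boxed = 1;                       // autoboxing compiles to Integer.valueOf(1)
            assert boxed.equals(Integer.valueOf(1)); // value equality, as assertEquals uses
            assert boxed == Integer.valueOf(1);      // small values come from the shared cache
            // new Integer(1) would allocate a distinct instance (deprecated in later JDKs)
        }
    }
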
threadContext.getTransient("ctx.foo")); assertEquals("1", threadContext.getHeader("default")); } @@ -54,7 +58,7 @@ public class ThreadContextTests extends ESTestCase { Settings build = Settings.builder().put("request.headers.default", "1").build(); ThreadContext threadContext = new ThreadContext(build); threadContext.putHeader("foo", "bar"); - threadContext.putTransient("ctx.foo", new Integer(1)); + threadContext.putTransient("ctx.foo", 1); assertEquals("bar", threadContext.getHeader("foo")); assertEquals(new Integer(1), threadContext.getTransient("ctx.foo")); assertEquals("1", threadContext.getHeader("default")); @@ -70,7 +74,7 @@ public class ThreadContextTests extends ESTestCase { assertNull(threadContext.getHeader("simon")); assertEquals("bar", threadContext.getHeader("foo")); - assertEquals(new Integer(1), threadContext.getTransient("ctx.foo")); + assertEquals(Integer.valueOf(1), threadContext.getTransient("ctx.foo")); assertEquals("1", threadContext.getHeader("default")); } @@ -78,9 +82,9 @@ public class ThreadContextTests extends ESTestCase { Settings build = Settings.builder().put("request.headers.default", "1").build(); ThreadContext threadContext = new ThreadContext(build); threadContext.putHeader("foo", "bar"); - threadContext.putTransient("ctx.foo", new Integer(1)); + threadContext.putTransient("ctx.foo", 1); assertEquals("bar", threadContext.getHeader("foo")); - assertEquals(new Integer(1), threadContext.getTransient("ctx.foo")); + assertEquals(Integer.valueOf(1), threadContext.getTransient("ctx.foo")); assertEquals("1", threadContext.getHeader("default")); ThreadContext.StoredContext storedContext = threadContext.newStoredContext(); threadContext.putHeader("foo.bar", "baz"); @@ -91,7 +95,7 @@ public class ThreadContextTests extends ESTestCase { } assertEquals("bar", threadContext.getHeader("foo")); - assertEquals(new Integer(1), threadContext.getTransient("ctx.foo")); + assertEquals(Integer.valueOf(1), threadContext.getTransient("ctx.foo")); assertEquals("1", threadContext.getHeader("default")); assertEquals("baz", threadContext.getHeader("foo.bar")); if (randomBoolean()) { @@ -100,11 +104,44 @@ public class ThreadContextTests extends ESTestCase { storedContext.close(); } assertEquals("bar", threadContext.getHeader("foo")); - assertEquals(new Integer(1), threadContext.getTransient("ctx.foo")); + assertEquals(Integer.valueOf(1), threadContext.getTransient("ctx.foo")); assertEquals("1", threadContext.getHeader("default")); assertNull(threadContext.getHeader("foo.bar")); } + public void testResponseHeaders() { + final boolean expectThird = randomBoolean(); + + final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); + + threadContext.addResponseHeader("foo", "bar"); + // pretend that another thread created the same response + if (randomBoolean()) { + threadContext.addResponseHeader("foo", "bar"); + } + + threadContext.addResponseHeader("Warning", "One is the loneliest number"); + threadContext.addResponseHeader("Warning", "Two can be as bad as one"); + if (expectThird) { + threadContext.addResponseHeader("Warning", "No is the saddest experience"); + } + + final Map> responseHeaders = threadContext.getResponseHeaders(); + final List foo = responseHeaders.get("foo"); + final List warnings = responseHeaders.get("Warning"); + final int expectedWarnings = expectThird ? 
3 : 2; + + assertThat(foo, hasSize(1)); + assertEquals("bar", foo.get(0)); + assertThat(warnings, hasSize(expectedWarnings)); + assertThat(warnings, hasItem(equalTo("One is the loneliest number"))); + assertThat(warnings, hasItem(equalTo("Two can be as bad as one"))); + + if (expectThird) { + assertThat(warnings, hasItem(equalTo("No is the saddest experience"))); + } + } + public void testCopyHeaders() { Settings build = Settings.builder().put("request.headers.default", "1").build(); ThreadContext threadContext = new ThreadContext(build); @@ -117,7 +154,7 @@ Settings build = Settings.builder().put("request.headers.default", "1").build(); ThreadContext threadContext = new ThreadContext(build); threadContext.putHeader("foo", "bar"); - threadContext.putTransient("ctx.foo", new Integer(1)); + threadContext.putTransient("ctx.foo", 1); threadContext.close(); try { @@ -146,20 +183,35 @@ Settings build = Settings.builder().put("request.headers.default", "1").build(); ThreadContext threadContext = new ThreadContext(build); threadContext.putHeader("foo", "bar"); - threadContext.putTransient("ctx.foo", new Integer(1)); + threadContext.putTransient("ctx.foo", 1); + threadContext.addResponseHeader("Warning", "123456"); + if (rarely()) { + threadContext.addResponseHeader("Warning", "123456"); + } + threadContext.addResponseHeader("Warning", "234567"); + BytesStreamOutput out = new BytesStreamOutput(); threadContext.writeTo(out); try (ThreadContext.StoredContext ctx = threadContext.stashContext()) { assertNull(threadContext.getHeader("foo")); assertNull(threadContext.getTransient("ctx.foo")); + assertTrue(threadContext.getResponseHeaders().isEmpty()); assertEquals("1", threadContext.getHeader("default")); - threadContext.readHeaders(StreamInput.wrap(out.bytes())); + threadContext.readHeaders(out.bytes().streamInput()); assertEquals("bar", threadContext.getHeader("foo")); assertNull(threadContext.getTransient("ctx.foo")); + + final Map<String, List<String>> responseHeaders = threadContext.getResponseHeaders(); + final List<String> warnings = responseHeaders.get("Warning"); + + assertThat(responseHeaders.keySet(), hasSize(1)); + assertThat(warnings, hasSize(2)); + assertThat(warnings, hasItem(equalTo("123456"))); + assertThat(warnings, hasItem(equalTo("234567"))); } assertEquals("bar", threadContext.getHeader("foo")); - assertEquals(new Integer(1), threadContext.getTransient("ctx.foo")); + assertEquals(Integer.valueOf(1), threadContext.getTransient("ctx.foo")); assertEquals("1", threadContext.getHeader("default")); } @@ -169,30 +221,44 @@ Settings build = Settings.builder().put("request.headers.default", "1").build(); ThreadContext threadContext = new ThreadContext(build); threadContext.putHeader("foo", "bar"); - threadContext.putTransient("ctx.foo", new Integer(1)); + threadContext.putTransient("ctx.foo", 1); + threadContext.addResponseHeader("Warning", "123456"); + if (rarely()) { + threadContext.addResponseHeader("Warning", "123456"); + } + threadContext.addResponseHeader("Warning", "234567"); assertEquals("bar", threadContext.getHeader("foo")); assertNotNull(threadContext.getTransient("ctx.foo")); assertEquals("1", threadContext.getHeader("default")); + assertThat(threadContext.getResponseHeaders().keySet(), hasSize(1)); threadContext.writeTo(out); } { Settings otherSettings = Settings.builder().put("request.headers.default", "5").build(); - ThreadContext otherhreadContext =
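
The serialization tests in this file all revolve around the writeTo/readHeaders round trip visible in these hunks. The sketch below uses only calls that appear in the diff (BytesStreamOutput, writeTo, bytes().streamInput(), readHeaders) and assumes they behave exactly as the assertions describe; note the deliberate asymmetry that transients never cross the wire:

    import org.elasticsearch.common.io.stream.BytesStreamOutput;
    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.common.util.concurrent.ThreadContext;

    public class ThreadContextRoundTripSketch {
        public static void main(String[] args) throws Exception {
            ThreadContext source = new ThreadContext(Settings.EMPTY);
            source.putHeader("foo", "bar");
            source.putTransient("ctx.foo", 1);

            BytesStreamOutput out = new BytesStreamOutput();
            source.writeTo(out); // request and response headers are serialized

            ThreadContext target = new ThreadContext(Settings.EMPTY);
            target.readHeaders(out.bytes().streamInput());
            System.out.println(target.getHeader("foo"));        // bar
            System.out.println(target.getTransient("ctx.foo")); // null: transients are not serialized
        }
    }
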
new ThreadContext(otherSettings); - otherhreadContext.readHeaders(StreamInput.wrap(out.bytes())); + ThreadContext otherThreadContext = new ThreadContext(otherSettings); + otherThreadContext.readHeaders(out.bytes().streamInput()); - assertEquals("bar", otherhreadContext.getHeader("foo")); - assertNull(otherhreadContext.getTransient("ctx.foo")); - assertEquals("1", otherhreadContext.getHeader("default")); + assertEquals("bar", otherThreadContext.getHeader("foo")); + assertNull(otherThreadContext.getTransient("ctx.foo")); + assertEquals("1", otherThreadContext.getHeader("default")); + + final Map> responseHeaders = otherThreadContext.getResponseHeaders(); + final List warnings = responseHeaders.get("Warning"); + + assertThat(responseHeaders.keySet(), hasSize(1)); + assertThat(warnings, hasSize(2)); + assertThat(warnings, hasItem(equalTo("123456"))); + assertThat(warnings, hasItem(equalTo("234567"))); } } - + public void testSerializeInDifferentContextNoDefaults() throws IOException { BytesStreamOutput out = new BytesStreamOutput(); { ThreadContext threadContext = new ThreadContext(Settings.EMPTY); threadContext.putHeader("foo", "bar"); - threadContext.putTransient("ctx.foo", new Integer(1)); + threadContext.putTransient("ctx.foo", 1); assertEquals("bar", threadContext.getHeader("foo")); assertNotNull(threadContext.getTransient("ctx.foo")); @@ -202,7 +268,7 @@ public class ThreadContextTests extends ESTestCase { { Settings otherSettings = Settings.builder().put("request.headers.default", "5").build(); ThreadContext otherhreadContext = new ThreadContext(otherSettings); - otherhreadContext.readHeaders(StreamInput.wrap(out.bytes())); + otherhreadContext.readHeaders(out.bytes().streamInput()); assertEquals("bar", otherhreadContext.getHeader("foo")); assertNull(otherhreadContext.getTransient("ctx.foo")); @@ -210,7 +276,6 @@ public class ThreadContextTests extends ESTestCase { } } - public void testCanResetDefault() { Settings build = Settings.builder().put("request.headers.default", "1").build(); ThreadContext threadContext = new ThreadContext(build); @@ -294,8 +359,8 @@ public class ThreadContextTests extends ESTestCase { } return new AbstractRunnable() { @Override - public void onFailure(Throwable t) { - throw new RuntimeException(t); + public void onFailure(Exception e) { + throw new RuntimeException(e); } @Override diff --git a/core/src/test/java/org/elasticsearch/common/xcontent/ConstructingObjectParserTests.java b/core/src/test/java/org/elasticsearch/common/xcontent/ConstructingObjectParserTests.java index f3592936765..bef4a047ef5 100644 --- a/core/src/test/java/org/elasticsearch/common/xcontent/ConstructingObjectParserTests.java +++ b/core/src/test/java/org/elasticsearch/common/xcontent/ConstructingObjectParserTests.java @@ -68,7 +68,7 @@ public class ConstructingObjectParserTests extends ESTestCase { assertEquals(expected.b, parsed.b); assertEquals(expected.c, parsed.c); assertEquals(expected.d, parsed.d); - } catch (Throwable e) { + } catch (Exception e) { // It is convenient to decorate the error message with the json throw new Exception("Error parsing: [" + builder.string() + "]", e); } diff --git a/core/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java b/core/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java index 159d8a97be4..a8d26e87ecf 100644 --- a/core/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java +++ b/core/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java @@ -35,7 +35,7 @@ import 
org.elasticsearch.test.ESTestCase; public class ObjectParserTests extends ESTestCase { - private final static ParseFieldMatcherSupplier STRICT_PARSING = () -> ParseFieldMatcher.STRICT; + private static final ParseFieldMatcherSupplier STRICT_PARSING = () -> ParseFieldMatcher.STRICT; public void testBasics() throws IOException { XContentParser parser = XContentType.JSON.xContent().createParser( diff --git a/core/src/test/java/org/elasticsearch/common/xcontent/XContentFactoryTests.java b/core/src/test/java/org/elasticsearch/common/xcontent/XContentFactoryTests.java index 583234461b3..8319873878a 100644 --- a/core/src/test/java/org/elasticsearch/common/xcontent/XContentFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/common/xcontent/XContentFactoryTests.java @@ -57,11 +57,10 @@ public class XContentFactoryTests extends ESTestCase { builder.endObject(); assertThat(XContentFactory.xContentType(builder.bytes()), equalTo(type)); - BytesArray bytesArray = builder.bytes().toBytesArray(); - assertThat(XContentFactory.xContentType(StreamInput.wrap(bytesArray.array(), bytesArray.arrayOffset(), bytesArray.length())), equalTo(type)); + assertThat(XContentFactory.xContentType(builder.bytes().streamInput()), equalTo(type)); // CBOR is binary, cannot use String - if (type != XContentType.CBOR) { + if (type != XContentType.CBOR && type != XContentType.SMILE) { assertThat(XContentFactory.xContentType(builder.string()), equalTo(type)); } } diff --git a/core/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java b/core/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java index 34944e713bd..fe69fc1f05d 100644 --- a/core/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java @@ -94,7 +94,7 @@ public class XContentBuilderTests extends ESTestCase { xContentBuilder.startObject(); xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}")); xContentBuilder.endObject(); - assertThat(xContentBuilder.bytes().toUtf8(), equalTo("{\"foo\":{\"test\":\"value\"}}")); + assertThat(xContentBuilder.bytes().utf8ToString(), equalTo("{\"foo\":{\"test\":\"value\"}}")); } { XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON); @@ -102,7 +102,7 @@ public class XContentBuilderTests extends ESTestCase { xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}")); xContentBuilder.rawField("foo1", new BytesArray("{\"test\":\"value\"}")); xContentBuilder.endObject(); - assertThat(xContentBuilder.bytes().toUtf8(), equalTo("{\"foo\":{\"test\":\"value\"},\"foo1\":{\"test\":\"value\"}}")); + assertThat(xContentBuilder.bytes().utf8ToString(), equalTo("{\"foo\":{\"test\":\"value\"},\"foo1\":{\"test\":\"value\"}}")); } { XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON); @@ -110,7 +110,7 @@ public class XContentBuilderTests extends ESTestCase { xContentBuilder.field("test", "value"); xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}")); xContentBuilder.endObject(); - assertThat(xContentBuilder.bytes().toUtf8(), equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"}}")); + assertThat(xContentBuilder.bytes().utf8ToString(), equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"}}")); } { XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON); @@ -119,7 +119,7 @@ public class XContentBuilderTests extends ESTestCase { 
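
For the XContentFactoryTests change above: sniffing the content type from raw bytes works for every format, but round-tripping through a String is only safe for text formats, which is presumably why SMILE now joins CBOR in the exclusion. A hedged sketch of the byte-based path, using only the factory calls the test itself makes (class name invented):

    import org.elasticsearch.common.xcontent.XContentBuilder;
    import org.elasticsearch.common.xcontent.XContentFactory;
    import org.elasticsearch.common.xcontent.XContentType;

    public class ContentTypeSniffSketch {
        public static void main(String[] args) throws Exception {
            XContentBuilder builder = XContentFactory.contentBuilder(XContentType.SMILE);
            builder.startObject().field("field", "value").endObject();
            // byte-based detection handles binary formats too
            System.out.println(XContentFactory.xContentType(builder.bytes())); // SMILE
            // builder.string() would be unsafe here: SMILE bytes do not survive
            // a String round trip, hence the new exclusion alongside CBOR
        }
    }
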
xContentBuilder.rawField("foo", new BytesArray("{\"test\":\"value\"}")); xContentBuilder.field("test1", "value1"); xContentBuilder.endObject(); - assertThat(xContentBuilder.bytes().toUtf8(), equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"},\"test1\":\"value1\"}")); + assertThat(xContentBuilder.bytes().utf8ToString(), equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"},\"test1\":\"value1\"}")); } { XContentBuilder xContentBuilder = XContentFactory.contentBuilder(XContentType.JSON); @@ -129,7 +129,7 @@ public class XContentBuilderTests extends ESTestCase { xContentBuilder.rawField("foo1", new BytesArray("{\"test\":\"value\"}")); xContentBuilder.field("test1", "value1"); xContentBuilder.endObject(); - assertThat(xContentBuilder.bytes().toUtf8(), equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"},\"foo1\":{\"test\":\"value\"},\"test1\":\"value1\"}")); + assertThat(xContentBuilder.bytes().utf8ToString(), equalTo("{\"test\":\"value\",\"foo\":{\"test\":\"value\"},\"foo1\":{\"test\":\"value\"},\"test1\":\"value1\"}")); } } @@ -161,15 +161,14 @@ public class XContentBuilderTests extends ESTestCase { gen.writeEndObject(); gen.close(); - byte[] data = bos.bytes().toBytes(); - String sData = new String(data, "UTF8"); + String sData = bos.bytes().utf8ToString(); assertThat(sData, equalTo("{\"name\":\"something\", source : { test : \"value\" },\"name2\":\"something2\"}")); } public void testByteConversion() throws Exception { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.startObject().field("test_name", (Byte)(byte)120).endObject(); - assertThat(builder.bytes().toUtf8(), equalTo("{\"test_name\":120}")); + assertThat(builder.bytes().utf8ToString(), equalTo("{\"test_name\":120}")); } public void testDateTypesConversion() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/common/xcontent/cbor/JsonVsCborTests.java b/core/src/test/java/org/elasticsearch/common/xcontent/cbor/JsonVsCborTests.java index bf2dd442b64..efbca114aac 100644 --- a/core/src/test/java/org/elasticsearch/common/xcontent/cbor/JsonVsCborTests.java +++ b/core/src/test/java/org/elasticsearch/common/xcontent/cbor/JsonVsCborTests.java @@ -63,7 +63,8 @@ public class JsonVsCborTests extends ESTestCase { xsonGen.close(); jsonGen.close(); - verifySameTokens(XContentFactory.xContent(XContentType.JSON).createParser(jsonOs.bytes().toBytes()), XContentFactory.xContent(XContentType.CBOR).createParser(xsonOs.bytes().toBytes())); + verifySameTokens(XContentFactory.xContent(XContentType.JSON).createParser(jsonOs.bytes()), + XContentFactory.xContent(XContentType.CBOR).createParser(xsonOs.bytes())); } private void verifySameTokens(XContentParser parser1, XContentParser parser2) throws IOException { diff --git a/core/src/test/java/org/elasticsearch/common/xcontent/smile/JsonVsSmileTests.java b/core/src/test/java/org/elasticsearch/common/xcontent/smile/JsonVsSmileTests.java index 9e686fe78f1..63b19a63822 100644 --- a/core/src/test/java/org/elasticsearch/common/xcontent/smile/JsonVsSmileTests.java +++ b/core/src/test/java/org/elasticsearch/common/xcontent/smile/JsonVsSmileTests.java @@ -63,7 +63,8 @@ public class JsonVsSmileTests extends ESTestCase { xsonGen.close(); jsonGen.close(); - verifySameTokens(XContentFactory.xContent(XContentType.JSON).createParser(jsonOs.bytes().toBytes()), XContentFactory.xContent(XContentType.SMILE).createParser(xsonOs.bytes().toBytes())); + verifySameTokens(XContentFactory.xContent(XContentType.JSON).createParser(jsonOs.bytes()), + 
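
The long run of utf8ToString() and streamInput() substitutions in these XContentBuilder hunks all target the same two accessors on the value returned by builder.bytes(). A small sketch of that new surface; that the return type is a BytesReference exposing exactly these helpers is an assumption inferred from the calls in the diff:

    import org.elasticsearch.common.io.stream.StreamInput;
    import org.elasticsearch.common.xcontent.XContentBuilder;
    import org.elasticsearch.common.xcontent.XContentFactory;
    import org.elasticsearch.common.xcontent.XContentType;

    public class BytesAccessorsSketch {
        public static void main(String[] args) throws Exception {
            XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
            builder.startObject().field("ok", true).endObject();
            String json = builder.bytes().utf8ToString();   // replaces bytes().toUtf8()
            StreamInput in = builder.bytes().streamInput(); // replaces StreamInput.wrap(...) over raw arrays
            System.out.println(json); // {"ok":true}
            in.close();
        }
    }
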
XContentFactory.xContent(XContentType.SMILE).createParser(xsonOs.bytes())); } private void verifySameTokens(XContentParser parser1, XContentParser parser2) throws IOException { diff --git a/core/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractFilteringJsonGeneratorTestCase.java b/core/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractFilteringJsonGeneratorTestCase.java index e3d8735e05e..b8b38a543f6 100644 --- a/core/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractFilteringJsonGeneratorTestCase.java +++ b/core/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractFilteringJsonGeneratorTestCase.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.ESTestCase; -import java.io.ByteArrayInputStream; import java.io.IOException; import static org.hamcrest.CoreMatchers.is; @@ -45,7 +44,7 @@ public abstract class AbstractFilteringJsonGeneratorTestCase extends ESTestCase assertNotNull(expected); // Verify that the result is equal to the expected string - assertThat(builder.bytes().toUtf8(), is(expected.bytes().toUtf8())); + assertThat(builder.bytes().utf8ToString(), is(expected.bytes().utf8ToString())); } protected void assertBinary(XContentBuilder expected, XContentBuilder builder) { @@ -1166,15 +1165,15 @@ public abstract class AbstractFilteringJsonGeneratorTestCase extends ESTestCase // Test method: rawField(String fieldName, InputStream content) assertXContentBuilder(expectedRawField, - newXContentBuilder().startObject().field("foo", 0).rawField("raw", new ByteArrayInputStream(raw.toBytes())).endObject()); + newXContentBuilder().startObject().field("foo", 0).rawField("raw", raw.streamInput()).endObject()); assertXContentBuilder(expectedRawFieldFiltered, newXContentBuilder("f*", true).startObject().field("foo", 0) - .rawField("raw", new ByteArrayInputStream(raw.toBytes())).endObject()); + .rawField("raw", raw.streamInput()).endObject()); assertXContentBuilder(expectedRawFieldFiltered, newXContentBuilder("r*", false).startObject().field("foo", 0) - .rawField("raw", new ByteArrayInputStream(raw.toBytes())).endObject()); + .rawField("raw", raw.streamInput()).endObject()); assertXContentBuilder(expectedRawFieldNotFiltered, newXContentBuilder("r*", true).startObject().field("foo", 0) - .rawField("raw", new ByteArrayInputStream(raw.toBytes())).endObject()); + .rawField("raw", raw.streamInput()).endObject()); assertXContentBuilder(expectedRawFieldNotFiltered, newXContentBuilder("f*", false).startObject().field("foo", 0) - .rawField("raw", new ByteArrayInputStream(raw.toBytes())).endObject()); + .rawField("raw", raw.streamInput()).endObject()); } public void testArrays() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilterPathGeneratorFilteringTests.java b/core/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilterPathGeneratorFilteringTests.java index dd2fe42eb8e..8dbefedb249 100644 --- a/core/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilterPathGeneratorFilteringTests.java +++ b/core/src/test/java/org/elasticsearch/common/xcontent/support/filtering/FilterPathGeneratorFilteringTests.java @@ -142,7 +142,7 @@ public class FilterPathGeneratorFilteringTests extends ESTestCase { } } } - assertThat(os.bytes().toUtf8(), equalTo(replaceQuotes(expected))); + assertThat(os.bytes().utf8ToString(), 
equalTo(replaceQuotes(expected))); } } diff --git a/core/src/test/java/org/elasticsearch/consistencylevel/WriteConsistencyLevelIT.java b/core/src/test/java/org/elasticsearch/consistencylevel/WriteConsistencyLevelIT.java index fb54e9b6f52..067f7e11530 100644 --- a/core/src/test/java/org/elasticsearch/consistencylevel/WriteConsistencyLevelIT.java +++ b/core/src/test/java/org/elasticsearch/consistencylevel/WriteConsistencyLevelIT.java @@ -22,6 +22,7 @@ package org.elasticsearch.consistencylevel; import org.elasticsearch.action.UnavailableShardsException; import org.elasticsearch.action.WriteConsistencyLevel; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.Priority; import org.elasticsearch.common.settings.Settings; @@ -30,6 +31,7 @@ import org.elasticsearch.test.ESIntegTestCase; import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; /** @@ -37,12 +39,11 @@ import static org.hamcrest.Matchers.equalTo; */ public class WriteConsistencyLevelIT extends ESIntegTestCase { public void testWriteConsistencyLevelReplication2() throws Exception { - prepareCreate("test", 1, Settings.builder().put("index.number_of_shards", 1).put("index.number_of_replicas", 2)).execute().actionGet(); + CreateIndexResponse createIndexResponse = + prepareCreate("test", 1, Settings.builder().put("index.number_of_shards", 1).put("index.number_of_replicas", 2)) + .get(); - ClusterHealthResponse clusterHealth = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForActiveShards(1).setWaitForYellowStatus().execute().actionGet(); - logger.info("Done Cluster Health, status {}", clusterHealth.getStatus()); - assertThat(clusterHealth.isTimedOut(), equalTo(false)); - assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.YELLOW)); + assertAcked(createIndexResponse); // indexing, by default, will work (ONE consistency level) client().prepareIndex("test", "type1", "1").setSource(source("1", "test")).setConsistencyLevel(WriteConsistencyLevel.ONE).execute().actionGet(); @@ -59,7 +60,13 @@ public class WriteConsistencyLevelIT extends ESIntegTestCase { allowNodes("test", 2); - clusterHealth = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForActiveShards(2).setWaitForYellowStatus().execute().actionGet(); + ClusterHealthResponse clusterHealth = + client().admin().cluster().prepareHealth() + .setWaitForEvents(Priority.LANGUID) + .setWaitForActiveShards(2) + .setWaitForYellowStatus() + .execute() + .actionGet(); logger.info("Done Cluster Health, status {}", clusterHealth.getStatus()); assertThat(clusterHealth.isTimedOut(), equalTo(false)); assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.YELLOW)); diff --git a/core/src/test/java/org/elasticsearch/deps/jackson/JacksonLocationTests.java b/core/src/test/java/org/elasticsearch/deps/jackson/JacksonLocationTests.java index 4efedd9154a..c25a0a6503b 100644 --- a/core/src/test/java/org/elasticsearch/deps/jackson/JacksonLocationTests.java +++ b/core/src/test/java/org/elasticsearch/deps/jackson/JacksonLocationTests.java @@ -56,8 +56,7 @@ public class JacksonLocationTests extends ESTestCase { 
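
JacksonLocationTests above now hands Jackson an InputStream instead of a copied byte[]. A self-contained sketch of the same token walk using plain Jackson; createParser(InputStream) is standard Jackson API, and the sample document is invented:

    import com.fasterxml.jackson.core.JsonFactory;
    import com.fasterxml.jackson.core.JsonParser;

    import java.io.ByteArrayInputStream;
    import java.nio.charset.StandardCharsets;

    public class JacksonStreamSketch {
        public static void main(String[] args) throws Exception {
            byte[] data = "{\"index\":1}".getBytes(StandardCharsets.UTF_8);
            JsonParser parser = new JsonFactory().createParser(new ByteArrayInputStream(data));
            System.out.println(parser.nextToken()); // START_OBJECT
            System.out.println(parser.nextToken()); // FIELD_NAME
            System.out.println(parser.nextToken()); // VALUE_NUMBER_INT
            System.out.println(parser.nextToken()); // END_OBJECT
        }
    }
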
gen.close(); - byte[] data = os.bytes().toBytes(); - JsonParser parser = new JsonFactory().createParser(data); + JsonParser parser = new JsonFactory().createParser(os.bytes().streamInput()); assertThat(parser.nextToken(), equalTo(JsonToken.START_OBJECT)); assertThat(parser.nextToken(), equalTo(JsonToken.FIELD_NAME)); // "index" diff --git a/core/src/test/java/org/elasticsearch/discovery/BlockingClusterStatePublishResponseHandlerTests.java b/core/src/test/java/org/elasticsearch/discovery/BlockingClusterStatePublishResponseHandlerTests.java index f6aac190c4b..4ff4c4cd035 100644 --- a/core/src/test/java/org/elasticsearch/discovery/BlockingClusterStatePublishResponseHandlerTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/BlockingClusterStatePublishResponseHandlerTests.java @@ -21,7 +21,7 @@ package org.elasticsearch.discovery; import org.elasticsearch.Version; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.logging.ESLogger; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.test.ESTestCase; @@ -40,7 +40,7 @@ import static org.hamcrest.Matchers.not; public class BlockingClusterStatePublishResponseHandlerTests extends ESTestCase { - static private class PublishResponder extends AbstractRunnable { + private static class PublishResponder extends AbstractRunnable { final boolean fail; final DiscoveryNode node; @@ -58,8 +58,8 @@ public class BlockingClusterStatePublishResponseHandlerTests extends ESTestCase } @Override - public void onFailure(Throwable t) { - logger.error("unexpected error", t); + public void onFailure(Exception e) { + logger.error("unexpected error", e); } @Override @@ -77,7 +77,7 @@ public class BlockingClusterStatePublishResponseHandlerTests extends ESTestCase int nodeCount = scaledRandomIntBetween(10, 20); DiscoveryNode[] allNodes = new DiscoveryNode[nodeCount]; for (int i = 0; i < nodeCount; i++) { - DiscoveryNode node = new DiscoveryNode("node_" + i, DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + DiscoveryNode node = new DiscoveryNode("node_" + i, LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); allNodes[i] = node; } diff --git a/core/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java b/core/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java index 86c3cd91bff..057b54c7a07 100644 --- a/core/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java @@ -38,10 +38,8 @@ public class DiscoveryModuleTests extends ModuleTestCase { } } - public void testRegisterMasterElectionService() { - Settings settings = Settings.builder().put(Node.NODE_LOCAL_SETTING.getKey(), false). 
- put(DiscoveryModule.ZEN_MASTER_SERVICE_TYPE_SETTING.getKey(), "custom").build(); + Settings settings = Settings.builder().put(DiscoveryModule.ZEN_MASTER_SERVICE_TYPE_SETTING.getKey(), "custom").build(); DiscoveryModule module = new DiscoveryModule(settings); module.addElectMasterService("custom", DummyMasterElectionService.class); assertBinding(module, ElectMasterService.class, DummyMasterElectionService.class); @@ -49,24 +47,20 @@ public class DiscoveryModuleTests extends ModuleTestCase { } public void testLoadUnregisteredMasterElectionService() { - Settings settings = Settings.builder().put(Node.NODE_LOCAL_SETTING.getKey(), false). - put(DiscoveryModule.ZEN_MASTER_SERVICE_TYPE_SETTING.getKey(), "foobar").build(); + Settings settings = Settings.builder().put(DiscoveryModule.ZEN_MASTER_SERVICE_TYPE_SETTING.getKey(), "foobar").build(); DiscoveryModule module = new DiscoveryModule(settings); module.addElectMasterService("custom", DummyMasterElectionService.class); assertBindingFailure(module, "Unknown master service type [foobar]"); } public void testRegisterDefaults() { - boolean local = randomBoolean(); - Settings settings = Settings.builder().put(Node.NODE_LOCAL_SETTING.getKey(), local).build(); + Settings settings = Settings.EMPTY; DiscoveryModule module = new DiscoveryModule(settings); - assertBinding(module, Discovery.class, local ? LocalDiscovery.class : ZenDiscovery.class); + assertBinding(module, Discovery.class, ZenDiscovery.class); } public void testRegisterDiscovery() { - boolean local = randomBoolean(); - Settings settings = Settings.builder().put(Node.NODE_LOCAL_SETTING.getKey(), local). - put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "custom").build(); + Settings settings = Settings.builder().put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "custom").build(); DiscoveryModule module = new DiscoveryModule(settings); module.addDiscoveryType("custom", NoopDiscovery.class); assertBinding(module, Discovery.class, NoopDiscovery.class); diff --git a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java index 0187bb28f36..7d74c7ec2f0 100644 --- a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java @@ -62,6 +62,7 @@ import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.discovery.ClusterDiscoveryConfiguration; import org.elasticsearch.test.disruption.BlockClusterStateProcessing; +import org.elasticsearch.test.disruption.BridgePartition; import org.elasticsearch.test.disruption.IntermittentLongGCDisruption; import org.elasticsearch.test.disruption.LongGCDisruption; import org.elasticsearch.test.disruption.NetworkDelaysPartition; @@ -169,12 +170,11 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { return nodes; } - final static Settings DEFAULT_SETTINGS = Settings.builder() + static final Settings DEFAULT_SETTINGS = Settings.builder() .put(FaultDetection.PING_TIMEOUT_SETTING.getKey(), "1s") // for hitting simulated network failures quickly .put(FaultDetection.PING_RETRIES_SETTING.getKey(), "1") // for hitting simulated network failures quickly .put("discovery.zen.join_timeout", "10s") // still long to induce failures but to long so test won't time out .put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "1s") // <-- for hitting 
simulated network failures quickly - .put("http.enabled", false) // just to make test quicker .build(); @Override @@ -447,8 +447,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { final int seconds = !(TEST_NIGHTLY && rarely()) ? 1 : 5; final String timeout = seconds + "s"; - // TODO: add node count randomizaion - final List nodes = startCluster(3); + final List nodes = startCluster(rarely() ? 5 : 3); assertAcked(prepareCreate("test") .setSettings(Settings.builder() @@ -503,8 +502,8 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { } } catch (InterruptedException e) { // fine - semaphore interrupt - } catch (Throwable t) { - logger.info("unexpected exception in background thread of [{}]", t, node); + } catch (AssertionError | Exception e) { + logger.info("unexpected exception in background thread of [{}]", e, node); } } }); @@ -540,7 +539,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { logger.info("stopping disruption"); disruptionScheme.stopDisrupting(); for (String node : internalCluster().getNodeNames()) { - ensureStableCluster(3, TimeValue.timeValueMillis(disruptionScheme.expectedTimeToHeal().millis() + + ensureStableCluster(nodes.size(), TimeValue.timeValueMillis(disruptionScheme.expectedTimeToHeal().millis() + DISRUPTION_HEALING_OVERHEAD.millis()), true, node); } ensureGreen("test"); @@ -548,7 +547,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { logger.info("validating successful docs"); for (String node : nodes) { try { - logger.debug("validating through node [{}]", node); + logger.debug("validating through node [{}] ([{}] acked docs)", node, ackedDocs.size()); for (String id : ackedDocs.keySet()) { assertTrue("doc [" + id + "] indexed via node [" + ackedDocs.get(id) + "] not found", client(node).prepareGet("test", "type", id).setPreference("_local").get().isExists()); @@ -690,8 +689,8 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { } @Override - public void onFailure(String source, Throwable t) { - logger.warn("failure [{}]", t, source); + public void onFailure(String source, Exception e) { + logger.warn("failure [{}]", e, source); } }); @@ -960,7 +959,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { } @Override - public void onFailure(Throwable t) { + public void onFailure(Exception e) { success.set(false); latch.countDown(); assert false; @@ -1133,7 +1132,6 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { final List allMasterEligibleNodes = masterNodes.get(); ensureStableCluster(3); assertAcked(prepareCreate("test")); - ensureYellow(); final String masterNode1 = internalCluster().getMasterName(); NetworkPartition networkPartition = new NetworkUnresponsivePartition(masterNode1, dataNode.get(), random()); @@ -1192,7 +1190,8 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { new NetworkUnresponsivePartition(random()), new NetworkDelaysPartition(random()), new NetworkDisconnectPartition(random()), - new SlowClusterStateProcessing(random()) + new SlowClusterStateProcessing(random()), + new BridgePartition(random(), randomBoolean()) ); Collections.shuffle(list, random()); setDisruptionScheme(list.get(0)); diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ElectMasterServiceTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/ElectMasterServiceTests.java index 0f93e5d460c..b31b0cbaa55 100644 --- 
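
The catch (AssertionError | Exception e) above, like the many onFailure(Throwable) to onFailure(Exception) signature changes throughout this section, narrows what test code is willing to swallow. The miniature below is hypothetical (every name is invented) and only sketches the design intent: recoverable exceptions are routed to a failure callback, while Errors propagate instead of being logged away.

    public class FailureCallbackSketch {
        interface FailureCallback { void onFailure(Exception e); } // narrowed from Throwable

        static void run(Runnable work, FailureCallback callback) {
            try {
                work.run();
            } catch (Exception e) {
                callback.onFailure(e); // recoverable: report and carry on
            }
            // Error and its subclasses are deliberately not caught: an
            // OutOfMemoryError or AssertionError should surface, not be "handled"
        }

        public static void main(String[] args) {
            run(() -> { throw new IllegalStateException("boom"); },
                    e -> System.out.println("handled: " + e.getMessage()));
        }
    }
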
a/core/src/test/java/org/elasticsearch/discovery/zen/ElectMasterServiceTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/ElectMasterServiceTests.java @@ -22,7 +22,7 @@ package org.elasticsearch.discovery.zen; import org.elasticsearch.Version; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.test.ESTestCase; @@ -46,7 +46,7 @@ public class ElectMasterServiceTests extends ESTestCase { if (randomBoolean()) { roles.add(DiscoveryNode.Role.MASTER); } - DiscoveryNode node = new DiscoveryNode("n_" + i, "n_" + i, DummyTransportAddress.INSTANCE, Collections.emptyMap(), + DiscoveryNode node = new DiscoveryNode("n_" + i, "n_" + i, LocalTransportAddress.buildUnique(), Collections.emptyMap(), roles, Version.CURRENT); nodes.add(node); } diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java index 135352343b6..cd2b4eaf2e4 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/NodeJoinControllerTests.java @@ -24,7 +24,6 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.NotMasterException; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.RoutingService; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.FailedRerouteAllocation; @@ -32,7 +31,6 @@ import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; @@ -41,6 +39,7 @@ import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.discovery.zen.membership.MembershipAction; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; @@ -50,6 +49,8 @@ import org.junit.Before; import org.junit.BeforeClass; import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; @@ -68,6 +69,7 @@ import static java.util.Collections.emptySet; import static java.util.Collections.shuffle; import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; import static org.elasticsearch.test.ClusterServiceUtils.setState; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -99,10 +101,9 @@ public class NodeJoinControllerTests extends 
ESTestCase { // make sure we have a master setState(clusterService, ClusterState.builder(clusterService.state()).nodes( DiscoveryNodes.builder(initialNodes).masterNodeId(localNode.getId()))); - nodeJoinController = new NodeJoinController(clusterService, new NoopRoutingService(Settings.EMPTY), - new ElectMasterService(Settings.EMPTY), - new DiscoverySettings(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)), - Settings.EMPTY); + nodeJoinController = new NodeJoinController(clusterService, new NoopAllocationService(Settings.EMPTY), + new ElectMasterService(Settings.EMPTY), new DiscoverySettings(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, + ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)), Settings.EMPTY); } @After @@ -198,17 +199,19 @@ public class NodeJoinControllerTests extends ESTestCase { final SimpleFuture electionFuture = new SimpleFuture("master election"); final Thread masterElection = new Thread(new AbstractRunnable() { @Override - public void onFailure(Throwable t) { - logger.error("unexpected error from waitToBeElectedAsMaster", t); - electionFuture.markAsFailed(t); + public void onFailure(Exception e) { + logger.error("unexpected error from waitToBeElectedAsMaster", e); + electionFuture.markAsFailed(e); } @Override protected void doRun() throws Exception { - nodeJoinController.waitToBeElectedAsMaster(requiredJoins, TimeValue.timeValueHours(30), new NodeJoinController.ElectionCallback() { + nodeJoinController.waitToBeElectedAsMaster(requiredJoins, TimeValue.timeValueHours(30), + new NodeJoinController.ElectionCallback() { @Override public void onElectedAsMaster(ClusterState state) { - assertThat("callback called with elected as master, but state disagrees", state.nodes().isLocalNodeElectedMaster(), equalTo(true)); + assertThat("callback called with elected as master, but state disagrees", state.nodes().isLocalNodeElectedMaster(), + equalTo(true)); electionFuture.markAsDone(); } @@ -246,17 +249,19 @@ public class NodeJoinControllerTests extends ESTestCase { final SimpleFuture electionFuture = new SimpleFuture("master election"); final Thread masterElection = new Thread(new AbstractRunnable() { @Override - public void onFailure(Throwable t) { - logger.error("unexpected error from waitToBeElectedAsMaster", t); - electionFuture.markAsFailed(t); + public void onFailure(Exception e) { + logger.error("unexpected error from waitToBeElectedAsMaster", e); + electionFuture.markAsFailed(e); } @Override protected void doRun() throws Exception { - nodeJoinController.waitToBeElectedAsMaster(requiredJoins, TimeValue.timeValueHours(30), new NodeJoinController.ElectionCallback() { + nodeJoinController.waitToBeElectedAsMaster(requiredJoins, TimeValue.timeValueHours(30), + new NodeJoinController.ElectionCallback() { @Override public void onElectedAsMaster(ClusterState state) { - assertThat("callback called with elected as master, but state disagrees", state.nodes().isLocalNodeElectedMaster(), equalTo(true)); + assertThat("callback called with elected as master, but state disagrees", state.nodes().isLocalNodeElectedMaster(), + equalTo(true)); electionFuture.markAsDone(); } @@ -298,7 +303,8 @@ public class NodeJoinControllerTests extends ESTestCase { } logger.debug("--> asserting master election didn't finish yet"); - assertThat("election finished after [" + initialJoins + "] master nodes but required joins is [" + requiredJoins + "]", electionFuture.isDone(), equalTo(false)); + assertThat("election finished after [" + initialJoins + "] master nodes but 
required joins is [" + requiredJoins + "]", + electionFuture.isDone(), equalTo(false)); final int finalJoins = requiredJoins - initialJoins + randomInt(5); nodesToJoin.clear(); @@ -374,7 +380,8 @@ public class NodeJoinControllerTests extends ESTestCase { nodeJoinController.waitToBeElectedAsMaster(requiredJoins, TimeValue.timeValueMillis(1), new NodeJoinController.ElectionCallback() { @Override public void onElectedAsMaster(ClusterState state) { - assertThat("callback called with elected as master, but state disagrees", state.nodes().isLocalNodeElectedMaster(), equalTo(true)); + assertThat("callback called with elected as master, but state disagrees", state.nodes().isLocalNodeElectedMaster(), + equalTo(true)); latch.countDown(); } @@ -403,7 +410,7 @@ public class NodeJoinControllerTests extends ESTestCase { public void testNewClusterStateOnExistingNodeJoin() throws InterruptedException, ExecutionException { ClusterState state = clusterService.state(); final DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder(state.nodes()); - final DiscoveryNode other_node = new DiscoveryNode("other_node", DummyTransportAddress.INSTANCE, + final DiscoveryNode other_node = new DiscoveryNode("other_node", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); nodesBuilder.put(other_node); setState(clusterService, ClusterState.builder(state).nodes(nodesBuilder)); @@ -425,9 +432,9 @@ public class NodeJoinControllerTests extends ESTestCase { nodes.add(node); threads[i] = new Thread(new AbstractRunnable() { @Override - public void onFailure(Throwable t) { - logger.error("unexpected error in join thread", t); - backgroundExceptions.add(t); + public void onFailure(Exception e) { + logger.error("unexpected error in join thread", e); + backgroundExceptions.add(e); } @Override @@ -468,9 +475,9 @@ public class NodeJoinControllerTests extends ESTestCase { nodes.add(node); threads[i] = new Thread(new AbstractRunnable() { @Override - public void onFailure(Throwable t) { - logger.error("unexpected error in join thread", t); - backgroundExceptions.add(t); + public void onFailure(Exception e) { + logger.error("unexpected error in join thread", e); + backgroundExceptions.add(e); } @Override @@ -492,7 +499,8 @@ public class NodeJoinControllerTests extends ESTestCase { nodeJoinController.waitToBeElectedAsMaster(requiredJoins, TimeValue.timeValueHours(30), new NodeJoinController.ElectionCallback() { @Override public void onElectedAsMaster(ClusterState state) { - assertThat("callback called with elected as master, but state disagrees", state.nodes().isLocalNodeElectedMaster(), equalTo(true)); + assertThat("callback called with elected as master, but state disagrees", state.nodes().isLocalNodeElectedMaster(), + equalTo(true)); latch.countDown(); } @@ -515,17 +523,37 @@ public class NodeJoinControllerTests extends ESTestCase { assertNodesInCurrentState(nodes); } + public void testRejectingJoinWithSameAddressButDifferentId() throws InterruptedException, ExecutionException { + ClusterState state = clusterService.state(); + final DiscoveryNode other_node = new DiscoveryNode("other_node", state.nodes().getLocalNode().getAddress(), + emptyMap(), emptySet(), Version.CURRENT); - static class NoopRoutingService extends RoutingService { + ExecutionException e = expectThrows(ExecutionException.class, () -> joinNode(other_node)); + assertThat(e.getMessage(), containsString("found existing node")); + } - public NoopRoutingService(Settings settings) { - super(settings, null, new NoopAllocationService(settings)); 
- } + public void testRejectingJoinWithSameIdButDifferentAddress() throws InterruptedException, ExecutionException { + ClusterState state = clusterService.state(); + final DiscoveryNode other_node = new DiscoveryNode(state.nodes().getLocalNode().getId(), + new LocalTransportAddress(randomAsciiOfLength(20)), emptyMap(), emptySet(), Version.CURRENT); - @Override - protected void performReroute(String reason) { + ExecutionException e = expectThrows(ExecutionException.class, () -> joinNode(other_node)); + assertThat(e.getMessage(), containsString("found existing node")); + } - } + public void testJoinWithSameIdSameAddressButDifferentMeta() throws InterruptedException, ExecutionException { + ClusterState state = clusterService.state(); + final DiscoveryNode localNode = state.nodes().getLocalNode(); + final DiscoveryNode other_node = new DiscoveryNode( + randomBoolean() ? localNode.getName() : "other_name", + localNode.getId(), localNode.getAddress(), + randomBoolean() ? localNode.getAttributes() : Collections.singletonMap("attr", "other"), + randomBoolean() ? localNode.getRoles() : new HashSet<>(randomSubsetOf(Arrays.asList(DiscoveryNode.Role.values()))), + randomBoolean() ? localNode.getVersion() : VersionUtils.randomVersion(random())); + + joinNode(other_node); + + assertThat(clusterService.localNode(), equalTo(other_node)); } static class NoopAllocationService extends AllocationService { @@ -535,12 +563,14 @@ public class NodeJoinControllerTests extends ESTestCase { } @Override - public RoutingAllocation.Result applyStartedShards(ClusterState clusterState, List startedShards, boolean withReroute) { + public RoutingAllocation.Result applyStartedShards(ClusterState clusterState, List startedShards, + boolean withReroute) { return new RoutingAllocation.Result(false, clusterState.routingTable(), clusterState.metaData()); } @Override - public RoutingAllocation.Result applyFailedShards(ClusterState clusterState, List failedShards) { + public RoutingAllocation.Result applyFailedShards(ClusterState clusterState, + List failedShards) { return new RoutingAllocation.Result(false, clusterState.routingTable(), clusterState.metaData()); } @@ -581,7 +611,7 @@ public class NodeJoinControllerTests extends ESTestCase { } } - final static AtomicInteger joinId = new AtomicInteger(); + static final AtomicInteger joinId = new AtomicInteger(); private SimpleFuture joinNodeAsync(final DiscoveryNode node) throws InterruptedException { final SimpleFuture future = new SimpleFuture("join of " + node + " (id [" + joinId.incrementAndGet() + "]"); @@ -596,9 +626,9 @@ public class NodeJoinControllerTests extends ESTestCase { } @Override - public void onFailure(Throwable t) { - logger.error("unexpected error for {}", t, future); - future.markAsFailed(t); + public void onFailure(Exception e) { + logger.error("unexpected error for {}", e, future); + future.markAsFailed(e); } }); return future; @@ -608,8 +638,8 @@ public class NodeJoinControllerTests extends ESTestCase { * creates an object clone of node, so it will be a different object instance */ private DiscoveryNode cloneNode(DiscoveryNode node) { - return new DiscoveryNode(node.getName(), node.getId(), node.getHostName(), node.getHostAddress(), node.getAddress(), - node.getAttributes(), node.getRoles(), node.getVersion()); + return new DiscoveryNode(node.getName(), node.getId(), node.getEphemeralId(), node.getHostName(), node.getHostAddress(), + node.getAddress(), node.getAttributes(), node.getRoles(), node.getVersion()); } private void joinNode(final DiscoveryNode node) 
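
The two rejection tests above lean on expectThrows(...), which comes from the test framework. The stand-in below sketches its assumed contract in plain Java (run the lambda, hand back the throwable if it matches, fail otherwise); it is a sketch, not the framework's implementation:

    public class ExpectThrowsSketch {
        @FunctionalInterface
        interface ThrowingRunnable { void run() throws Throwable; }

        static <T extends Throwable> T expectThrows(Class<T> expected, ThrowingRunnable runnable) {
            try {
                runnable.run();
            } catch (Throwable t) {
                if (expected.isInstance(t)) {
                    return expected.cast(t); // callers can then assert on the message
                }
                throw new AssertionError("unexpected exception type: " + t, t);
            }
            throw new AssertionError("expected " + expected.getName() + " but nothing was thrown");
        }

        public static void main(String[] args) {
            IllegalStateException e = expectThrows(IllegalStateException.class,
                    () -> { throw new IllegalStateException("found existing node"); });
            System.out.println(e.getMessage());
        }
    }
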
throws InterruptedException, ExecutionException { diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/NodeRemovalClusterStateTaskExecutorTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/NodeRemovalClusterStateTaskExecutorTests.java new file mode 100644 index 00000000000..667ca6fbccb --- /dev/null +++ b/core/src/test/java/org/elasticsearch/discovery/zen/NodeRemovalClusterStateTaskExecutorTests.java @@ -0,0 +1,185 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.discovery.zen; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateTaskExecutor; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.routing.allocation.AllocationService; +import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.transport.LocalTransportAddress; +import org.elasticsearch.discovery.zen.elect.ElectMasterService; +import org.elasticsearch.test.ESTestCase; + +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.BiFunction; +import java.util.stream.Collectors; +import java.util.stream.StreamSupport; + +import static org.hamcrest.CoreMatchers.equalTo; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; + +public class NodeRemovalClusterStateTaskExecutorTests extends ESTestCase { + + public void testRemovingNonExistentNodes() throws Exception { + final ZenDiscovery.NodeRemovalClusterStateTaskExecutor executor = + new ZenDiscovery.NodeRemovalClusterStateTaskExecutor(null, null, null, logger); + final DiscoveryNodes.Builder builder = DiscoveryNodes.builder(); + final int nodes = randomIntBetween(2, 16); + for (int i = 0; i < nodes; i++) { + builder.put(node(i)); + } + final ClusterState clusterState = ClusterState.builder(new ClusterName("test")).nodes(builder).build(); + + final DiscoveryNodes.Builder removeBuilder = DiscoveryNodes.builder(); + for (int i = nodes; i < nodes + randomIntBetween(1, 16); i++) { + removeBuilder.put(node(i)); + } + final List tasks = + StreamSupport + .stream(removeBuilder.build().spliterator(), false) + .map(node -> new ZenDiscovery.NodeRemovalClusterStateTaskExecutor.Task(node, randomBoolean() ? 
"left" : "failed")) + .collect(Collectors.toList()); + + final ClusterStateTaskExecutor.BatchResult result + = executor.execute(clusterState, tasks); + assertThat(result.resultingState, equalTo(clusterState)); + } + + public void testNotEnoughMasterNodesAfterRemove() throws Exception { + final ElectMasterService electMasterService = mock(ElectMasterService.class); + when(electMasterService.hasEnoughMasterNodes(any(Iterable.class))).thenReturn(false); + + final AllocationService allocationService = mock(AllocationService.class); + + final AtomicBoolean rejoined = new AtomicBoolean(); + final AtomicReference rejoinedClusterState = new AtomicReference<>(); + final BiFunction rejoin = (cs, r) -> { + rejoined.set(true); + rejoinedClusterState.set(ClusterState.builder(cs).build()); + return rejoinedClusterState.get(); + }; + + final AtomicReference remainingNodesClusterState = new AtomicReference<>(); + final ZenDiscovery.NodeRemovalClusterStateTaskExecutor executor = + new ZenDiscovery.NodeRemovalClusterStateTaskExecutor(allocationService, electMasterService, rejoin, logger) { + @Override + ClusterState remainingNodesClusterState(ClusterState currentState, DiscoveryNodes.Builder remainingNodesBuilder) { + remainingNodesClusterState.set(super.remainingNodesClusterState(currentState, remainingNodesBuilder)); + return remainingNodesClusterState.get(); + } + }; + + final DiscoveryNodes.Builder builder = DiscoveryNodes.builder(); + final int nodes = randomIntBetween(2, 16); + final List tasks = new ArrayList<>(); + // to ensure there is at least one removal + boolean first = true; + for (int i = 0; i < nodes; i++) { + final DiscoveryNode node = node(i); + builder.put(node); + if (first || randomBoolean()) { + tasks.add(new ZenDiscovery.NodeRemovalClusterStateTaskExecutor.Task(node, randomBoolean() ? 
"left" : "failed")); + } + first = false; + } + final ClusterState clusterState = ClusterState.builder(new ClusterName("test")).nodes(builder).build(); + + final ClusterStateTaskExecutor.BatchResult result = + executor.execute(clusterState, tasks); + verify(electMasterService).hasEnoughMasterNodes(eq(remainingNodesClusterState.get().nodes())); + verifyNoMoreInteractions(electMasterService); + + // ensure that we did not reroute + verifyNoMoreInteractions(allocationService); + assertTrue(rejoined.get()); + assertThat(result.resultingState, equalTo(rejoinedClusterState.get())); + + for (final ZenDiscovery.NodeRemovalClusterStateTaskExecutor.Task task : tasks) { + assertNull(result.resultingState.nodes().get(task.node().getId())); + } + } + + public void testRerouteAfterRemovingNodes() throws Exception { + final ElectMasterService electMasterService = mock(ElectMasterService.class); + when(electMasterService.hasEnoughMasterNodes(any(Iterable.class))).thenReturn(true); + + final AllocationService allocationService = mock(AllocationService.class); + when(allocationService.reroute(any(ClusterState.class), any(String.class))).thenReturn(mock(RoutingAllocation.Result.class)); + + final BiFunction rejoin = (cs, r) -> { + fail("rejoin should not be invoked"); + return cs; + }; + + final AtomicReference remainingNodesClusterState = new AtomicReference<>(); + final ZenDiscovery.NodeRemovalClusterStateTaskExecutor executor = + new ZenDiscovery.NodeRemovalClusterStateTaskExecutor(allocationService, electMasterService, rejoin, logger) { + @Override + ClusterState remainingNodesClusterState(ClusterState currentState, DiscoveryNodes.Builder remainingNodesBuilder) { + remainingNodesClusterState.set(super.remainingNodesClusterState(currentState, remainingNodesBuilder)); + return remainingNodesClusterState.get(); + } + }; + + final DiscoveryNodes.Builder builder = DiscoveryNodes.builder(); + final int nodes = randomIntBetween(2, 16); + final List tasks = new ArrayList<>(); + // to ensure that there is at least one removal + boolean first = true; + for (int i = 0; i < nodes; i++) { + final DiscoveryNode node = node(i); + builder.put(node); + if (first || randomBoolean()) { + tasks.add(new ZenDiscovery.NodeRemovalClusterStateTaskExecutor.Task(node, randomBoolean() ? 
"left" : "failed")); + } + first = false; + } + final ClusterState clusterState = ClusterState.builder(new ClusterName("test")).nodes(builder).build(); + + final ClusterStateTaskExecutor.BatchResult result = + executor.execute(clusterState, tasks); + verify(electMasterService).hasEnoughMasterNodes(eq(remainingNodesClusterState.get().nodes())); + verifyNoMoreInteractions(electMasterService); + + verify(allocationService).reroute(eq(remainingNodesClusterState.get()), any(String.class)); + + for (final ZenDiscovery.NodeRemovalClusterStateTaskExecutor.Task task : tasks) { + assertNull(result.resultingState.nodes().get(task.node().getId())); + } + } + + private DiscoveryNode node(final int id) { + return new DiscoveryNode(Integer.toString(id), LocalTransportAddress.buildUnique(), Version.CURRENT); + } + +} diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java index fd0b11eae01..3d0d9ddd8b1 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryIT.java @@ -261,8 +261,8 @@ public class ZenDiscoveryIT extends ESIntegTestCase { } @Override - public void onFailure(Throwable t) { - holder.set((IllegalStateException) t); + public void onFailure(Exception e) { + holder.set((IllegalStateException) e); } }); @@ -309,8 +309,8 @@ public class ZenDiscoveryIT extends ESIntegTestCase { } @Override - public void onFailure(Throwable t) { - holder.set((IllegalStateException) t); + public void onFailure(Exception e) { + holder.set((IllegalStateException) e); } }); diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java index a6638eb19cf..9db83f48f0e 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java @@ -24,8 +24,7 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.discovery.zen.ping.ZenPing; import org.elasticsearch.test.ESTestCase; @@ -51,9 +50,9 @@ public class ZenDiscoveryUnitTests extends ESTestCase { ClusterName clusterName = new ClusterName("abc"); DiscoveryNodes.Builder currentNodes = DiscoveryNodes.builder(); - currentNodes.masterNodeId("a").put(new DiscoveryNode("a", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT)); + currentNodes.masterNodeId("a").put(new DiscoveryNode("a", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT)); DiscoveryNodes.Builder newNodes = DiscoveryNodes.builder(); - newNodes.masterNodeId("a").put(new DiscoveryNode("a", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT)); + newNodes.masterNodeId("a").put(new DiscoveryNode("a", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT)); ClusterState.Builder currentState = ClusterState.builder(clusterName); currentState.nodes(currentNodes); @@ -71,7 +70,7 @@ public class ZenDiscoveryUnitTests extends ESTestCase { assertFalse("should not ignore, because new state's 
version is higher to current state's version", shouldIgnoreOrRejectNewClusterState(logger, currentState.build(), newState.build())); currentNodes = DiscoveryNodes.builder(); - currentNodes.masterNodeId("b").put(new DiscoveryNode("b", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT)); + currentNodes.masterNodeId("b").put(new DiscoveryNode("b", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT)); ; // version isn't taken into account, so randomize it to ensure this. if (randomBoolean()) { @@ -109,7 +108,7 @@ public class ZenDiscoveryUnitTests extends ESTestCase { ArrayList allNodes = new ArrayList<>(); for (int i = randomIntBetween(10, 20); i >= 0; i--) { Set roles = new HashSet<>(randomSubsetOf(Arrays.asList(DiscoveryNode.Role.values()))); - DiscoveryNode node = new DiscoveryNode("node_" + i, "id_" + i, DummyTransportAddress.INSTANCE, Collections.emptyMap(), + DiscoveryNode node = new DiscoveryNode("node_" + i, "id_" + i, LocalTransportAddress.buildUnique(), Collections.emptyMap(), roles, Version.CURRENT); responses.add(new ZenPing.PingResponse(node, randomBoolean() ? null : node, new ClusterName("test"), randomBoolean())); allNodes.add(node); diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ZenPingTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/ZenPingTests.java index 8aa5114c387..72674f44e3d 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/ZenPingTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/ZenPingTests.java @@ -23,7 +23,7 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.discovery.zen.ping.ZenPing; import org.elasticsearch.test.ESTestCase; @@ -42,7 +42,7 @@ public class ZenPingTests extends ESTestCase { boolean hasJoinedOncePerNode[] = new boolean[nodes.length]; ArrayList pings = new ArrayList<>(); for (int i = 0; i < nodes.length; i++) { - nodes[i] = new DiscoveryNode("" + i, DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + nodes[i] = new DiscoveryNode("" + i, LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); } for (int pingCount = scaledRandomIntBetween(10, nodes.length * 10); pingCount > 0; pingCount--) { diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java b/core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java index f072c5faf8a..6696174c08f 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java @@ -20,7 +20,6 @@ package org.elasticsearch.discovery.zen.ping.unicast; import org.elasticsearch.Version; -import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -31,6 +30,7 @@ import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; +import 
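
Throughout this section DummyTransportAddress.INSTANCE gives way to LocalTransportAddress.buildUnique(), presumably so that each test node carries its own address rather than all nodes sharing one singleton. A sketch of the resulting idiom, reusing the exact DiscoveryNode constructor arity these hunks show; the distinctness claim is an inference from the buildUnique name:

    import org.elasticsearch.Version;
    import org.elasticsearch.cluster.node.DiscoveryNode;
    import org.elasticsearch.common.transport.LocalTransportAddress;

    import static java.util.Collections.emptyMap;
    import static java.util.Collections.emptySet;

    public class TestNodeSketch {
        public static void main(String[] args) {
            DiscoveryNode a = new DiscoveryNode("node_a", LocalTransportAddress.buildUnique(),
                    emptyMap(), emptySet(), Version.CURRENT);
            DiscoveryNode b = new DiscoveryNode("node_b", LocalTransportAddress.buildUnique(),
                    emptyMap(), emptySet(), Version.CURRENT);
            // unlike the old shared singleton, the two addresses differ,
            // so address-based duplicate detection in the join code stays meaningful
            System.out.println(a.getAddress().equals(b.getAddress())); // false
        }
    }
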
org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.discovery.zen.elect.ElectMasterService; import org.elasticsearch.discovery.zen.ping.PingContextProvider; import org.elasticsearch.discovery.zen.ping.ZenPing; @@ -39,11 +39,10 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.MockTcpTransport; import org.elasticsearch.transport.TransportConnectionListener; import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.TransportSettings; -import org.elasticsearch.transport.netty.NettyTransport; -import org.jboss.netty.util.internal.ConcurrentHashMap; import java.net.InetSocketAddress; import java.util.concurrent.ConcurrentMap; @@ -199,17 +198,12 @@ public class UnicastZenPingIT extends ESTestCase { private NetworkHandle startServices(Settings settings, ThreadPool threadPool, NetworkService networkService, String nodeId, Version version) { - NettyTransport transport = new NettyTransport(settings, threadPool, networkService, BigArrays.NON_RECYCLING_INSTANCE, - new NamedWriteableRegistry(), new NoneCircuitBreakerService()) { - @Override - protected Version getCurrentVersion() { - return version; - } - }; + MockTcpTransport transport = new MockTcpTransport(settings, threadPool, BigArrays.NON_RECYCLING_INSTANCE, + new NoneCircuitBreakerService(), new NamedWriteableRegistry(), networkService, version); final TransportService transportService = new TransportService(settings, transport, threadPool); transportService.start(); transportService.acceptIncomingRequests(); - ConcurrentMap counters = new ConcurrentHashMap<>(); + ConcurrentMap counters = ConcurrentCollections.newConcurrentMap(); transportService.addConnectionListener(new TransportConnectionListener() { @Override public void onNodeConnected(DiscoveryNode node) { diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PendingClusterStatesQueueTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PendingClusterStatesQueueTests.java index ab9aed6ba44..42aa792c95f 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PendingClusterStatesQueueTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PendingClusterStatesQueueTests.java @@ -25,7 +25,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.discovery.zen.publish.PendingClusterStatesQueue.ClusterStateContext; import org.elasticsearch.test.ESTestCase; @@ -237,7 +237,7 @@ public class PendingClusterStatesQueueTests extends ESTestCase { ClusterState state = lastClusterStatePerMaster[masterIndex]; if (state == null) { state = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)).nodes(DiscoveryNodes.builder() - .put(new DiscoveryNode(masters[masterIndex], DummyTransportAddress.INSTANCE, + .put(new DiscoveryNode(masters[masterIndex], LocalTransportAddress.buildUnique(), emptyMap(), emptySet(),Version.CURRENT)).masterNodeId(masters[masterIndex]).build() ).build(); } else { @@ -259,8 +259,8 @@ public class PendingClusterStatesQueueTests extends ESTestCase { 
} @Override - public void onNewClusterStateFailed(Throwable t) { - failure = t; + public void onNewClusterStateFailed(Exception e) { + failure = e; } } diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java index 61374cc0d8f..7d72fa5c4dc 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java @@ -30,7 +30,6 @@ import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.node.DiscoveryNodeService; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Randomness; @@ -43,6 +42,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.DiscoveryNodesProvider; +import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.node.Node; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.junit.annotations.TestLogging; @@ -152,16 +152,16 @@ public class PublishClusterStateActionTests extends ESTestCase { return createMockNode(name, settings, null); } - public MockNode createMockNode(String name, Settings settings, @Nullable ClusterStateListener listener) throws Exception { - settings = Settings.builder() + public MockNode createMockNode(String name, final Settings baseSettings, @Nullable ClusterStateListener listener) throws Exception { + final Settings settings = Settings.builder() .put("name", name) .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "", TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING") - .put(settings) + .put(baseSettings) .build(); MockTransportService service = buildTransportService(settings); - DiscoveryNodeService discoveryNodeService = new DiscoveryNodeService(settings); - DiscoveryNode discoveryNode = discoveryNodeService.buildLocalNode(service.boundAddress().publishAddress()); + DiscoveryNode discoveryNode = DiscoveryNode.createLocal(settings, service.boundAddress().publishAddress(), + NodeEnvironment.generateNodeId(settings)); MockNode node = new MockNode(discoveryNode, service, listener, logger); node.action = buildPublishClusterStateAction(settings, service, () -> node.clusterState, node); final CountDownLatch latch = new CountDownLatch(nodes.size() * 2 + 1); @@ -797,9 +797,9 @@ public class PublishClusterStateActionTests extends ESTestCase { } @Override - public void onNodeAck(DiscoveryNode node, @Nullable Throwable t) { - if (t != null) { - errors.add(new Tuple<>(node, t)); + public void onNodeAck(DiscoveryNode node, @Nullable Exception e) { + if (e != null) { + errors.add(new Tuple<>(node, e)); } countDown.countDown(); } @@ -910,8 +910,8 @@ public class PublishClusterStateActionTests extends ESTestCase { } @Override - public void sendResponse(Throwable error) throws IOException { - this.error.set(error); + public void sendResponse(Exception exception) throws IOException { + this.error.set(exception); assertThat(response.get(), nullValue()); } diff --git a/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java
b/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java index ad425d8afc9..50e05d97985 100644 --- a/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java +++ b/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java @@ -22,7 +22,6 @@ import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.store.LockObtainFailedException; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.LuceneTestCase; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.io.PathUtils; @@ -48,12 +47,11 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFileExists; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFileNotExists; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.not; @LuceneTestCase.SuppressFileSystems("ExtrasFS") // TODO: fix test to allow extras public class NodeEnvironmentTests extends ESTestCase { @@ -269,9 +267,9 @@ public class NodeEnvironmentTests extends ESTestCase { if (randomBoolean()) { Thread t = new Thread(new AbstractRunnable() { @Override - public void onFailure(Throwable t) { - logger.error("unexpected error", t); - threadException.set(t); + public void onFailure(Exception e) { + logger.error("unexpected error", e); + threadException.set(e); latch.countDown(); blockLatch.countDown(); } @@ -392,7 +390,7 @@ public class NodeEnvironmentTests extends ESTestCase { env.close(); NodeEnvironment env2 = newNodeEnvironment(dataPaths, "/tmp", - Settings.builder().put(NodeEnvironment.ADD_NODE_ID_TO_CUSTOM_PATH.getKey(), false).build()); + Settings.builder().put(NodeEnvironment.ADD_NODE_LOCK_ID_TO_CUSTOM_PATH.getKey(), false).build()); assertThat(env2.availableShardPaths(sid), equalTo(env2.availableShardPaths(sid))); assertThat(env2.resolveCustomLocation(s2, sid), equalTo(PathUtils.get("/tmp/foo/" + index.getUUID() + "/0"))); @@ -450,6 +448,27 @@ public class NodeEnvironmentTests extends ESTestCase { } } + public void testPersistentNodeId() throws IOException { + String[] paths = tmpPaths(); + NodeEnvironment env = newNodeEnvironment(paths, Settings.builder() + .put("node.local_storage", false) + .put("node.master", false) + .put("node.data", false) + .build()); + String nodeID = env.nodeId(); + env.close(); + env = newNodeEnvironment(paths, Settings.EMPTY); + assertThat("previous node didn't have local storage enabled, id should change", env.nodeId(), not(equalTo(nodeID))); + nodeID = env.nodeId(); + env.close(); + env = newNodeEnvironment(paths, Settings.EMPTY); + assertThat(env.nodeId(), equalTo(nodeID)); + env.close(); + env = newNodeEnvironment(Settings.EMPTY); + assertThat(env.nodeId(), not(equalTo(nodeID))); + env.close(); + } + /** Converts an array of Strings to an array of Paths, adding an additional child if specified */ private Path[] stringsToPaths(String[] strings, String additional) { Path[] locations = new Path[strings.length]; diff --git a/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsIntegrationIT.java b/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsIntegrationIT.java index 
e87cc5f6571..6240a13a0bd 100644 --- a/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsIntegrationIT.java @@ -169,18 +169,25 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase { assertThat(response.getAllFieldStats().get("byte").getMinValue(), equalTo(minByte)); assertThat(response.getAllFieldStats().get("byte").getMaxValue(), equalTo(maxByte)); + assertThat(response.getAllFieldStats().get("byte").getDisplayType(), equalTo("integer")); assertThat(response.getAllFieldStats().get("short").getMinValue(), equalTo(minShort)); assertThat(response.getAllFieldStats().get("short").getMaxValue(), equalTo(maxShort)); + assertThat(response.getAllFieldStats().get("short").getDisplayType(), equalTo("integer")); assertThat(response.getAllFieldStats().get("integer").getMinValue(), equalTo(minInt)); assertThat(response.getAllFieldStats().get("integer").getMaxValue(), equalTo(maxInt)); + assertThat(response.getAllFieldStats().get("integer").getDisplayType(), equalTo("integer")); assertThat(response.getAllFieldStats().get("long").getMinValue(), equalTo(minLong)); assertThat(response.getAllFieldStats().get("long").getMaxValue(), equalTo(maxLong)); + assertThat(response.getAllFieldStats().get("long").getDisplayType(), equalTo("integer")); assertThat(response.getAllFieldStats().get("half_float").getMinValue(), equalTo(minHalfFloat)); assertThat(response.getAllFieldStats().get("half_float").getMaxValue(), equalTo(maxHalfFloat)); + assertThat(response.getAllFieldStats().get("half_float").getDisplayType(), equalTo("float")); assertThat(response.getAllFieldStats().get("float").getMinValue(), equalTo(minFloat)); assertThat(response.getAllFieldStats().get("float").getMaxValue(), equalTo(maxFloat)); + assertThat(response.getAllFieldStats().get("float").getDisplayType(), equalTo("float")); assertThat(response.getAllFieldStats().get("double").getMinValue(), equalTo(minDouble)); assertThat(response.getAllFieldStats().get("double").getMaxValue(), equalTo(maxDouble)); + assertThat(response.getAllFieldStats().get("double").getDisplayType(), equalTo("float")); } public void testFieldStatsIndexLevel() throws Exception { @@ -207,6 +214,8 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase { assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1)); assertThat(response.getIndicesMergedFieldStats().get("_all").get("value").getMinValue(), equalTo(-10L)); assertThat(response.getIndicesMergedFieldStats().get("_all").get("value").getMaxValue(), equalTo(300L)); + assertThat(response.getIndicesMergedFieldStats().get("_all").get("value").getDisplayType(), + equalTo("integer")); // Level: cluster response = client().prepareFieldStats().setFields("value").setLevel("cluster").get(); @@ -216,6 +225,8 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase { assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1)); assertThat(response.getIndicesMergedFieldStats().get("_all").get("value").getMinValue(), equalTo(-10L)); assertThat(response.getIndicesMergedFieldStats().get("_all").get("value").getMaxValue(), equalTo(300L)); + assertThat(response.getIndicesMergedFieldStats().get("_all").get("value").getDisplayType(), + equalTo("integer")); // Level: indices response = client().prepareFieldStats().setFields("value").setLevel("indices").get(); @@ -228,6 +239,8 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase { 
assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(), equalTo(200L)); assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMinValue(), equalTo(201L)); assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getMaxValue(), equalTo(300L)); + assertThat(response.getIndicesMergedFieldStats().get("test3").get("value").getDisplayType(), + equalTo("integer")); // Illegal level option: try { @@ -259,7 +272,7 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase { assertThat(response.getIndicesMergedFieldStats().get("_all").size(), equalTo(0)); assertThat(response.getConflicts().size(), equalTo(1)); assertThat(response.getConflicts().get("value"), - equalTo("Field [value] of type [whole-number] conflicts with existing field of type [text] " + + equalTo("Field [value] of type [integer] conflicts with existing field of type [string] " + "in other index.")); response = client().prepareFieldStats().setFields("value").setLevel("indices").get(); @@ -296,7 +309,7 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase { assertThat(response.getIndicesMergedFieldStats().get("_all").get("value2").getMaxValue(), equalTo(1L)); assertThat(response.getConflicts().size(), equalTo(1)); assertThat(response.getConflicts().get("value"), - equalTo("Field [value] of type [whole-number] conflicts with existing field of type [text] " + + equalTo("Field [value] of type [integer] conflicts with existing field of type [string] " + "in other index.")); response = client().prepareFieldStats().setFields("value", "value2").setLevel("indices").get(); @@ -310,6 +323,8 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase { equalTo(new BytesRef("a"))); assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMaxValue(), equalTo(new BytesRef("b"))); + assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getDisplayType(), + equalTo("string")); } public void testFieldStatsFiltering() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsTests.java b/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsTests.java index 20db1bf20c4..bfee11f7f0e 100644 --- a/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsTests.java +++ b/core/src/test/java/org/elasticsearch/fieldstats/FieldStatsTests.java @@ -89,6 +89,8 @@ public class FieldStatsTests extends ESSingleNodeTestCase { equalTo(String.format(Locale.ENGLISH, "%03d", 0))); assertThat(result.getAllFieldStats().get("field").getMaxValueAsString(), equalTo(String.format(Locale.ENGLISH, "%03d", 10))); + assertThat(result.getAllFieldStats().get("field").getDisplayType(), + equalTo("string")); } public void testDouble() { @@ -106,6 +108,7 @@ public class FieldStatsTests extends ESSingleNodeTestCase { assertThat(result.getAllFieldStats().get(fieldName).getMinValue(), equalTo(-1d)); assertThat(result.getAllFieldStats().get(fieldName).getMaxValue(), equalTo(9d)); assertThat(result.getAllFieldStats().get(fieldName).getMinValueAsString(), equalTo(Double.toString(-1))); + assertThat(result.getAllFieldStats().get(fieldName).getDisplayType(), equalTo("float")); } public void testHalfFloat() { @@ -124,6 +127,7 @@ public class FieldStatsTests extends ESSingleNodeTestCase { assertThat(result.getAllFieldStats().get(fieldName).getMaxValue(), equalTo(9d)); assertThat(result.getAllFieldStats().get(fieldName).getMinValueAsString(), equalTo(Float.toString(-1))); 
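The getDisplayType() assertions being added here bucket the concrete field types into a few coarse display names: the whole-number types all report "integer", the fractional types all report "float", and date and string fields report "date" and "string". A minimal sketch of that bucketing, assuming a plain switch over the mapper type name (displayTypeFor is an illustrative helper, not part of the FieldStats API):

    // Illustrative only; the real mapping lives in the FieldStats subclasses
    // (the tests show FieldStats.Long reporting "integer", for example).
    static String displayTypeFor(String mapperType) {
        switch (mapperType) {
            case "byte":
            case "short":
            case "integer":
            case "long":
                return "integer"; // whole-number types share one display type
            case "half_float":
            case "float":
            case "double":
                return "float";   // fractional types share another
            case "date":
                return "date";
            case "text":
            case "keyword":
                return "string";
            default:
                return mapperType;
        }
    }

This coarser name is also what shows up in conflict messages, which is why the expected text changes from "type [whole-number]" to "type [integer]" in the assertions that follow.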
assertThat(result.getAllFieldStats().get(fieldName).getMaxValueAsString(), equalTo(Float.toString(9))); + assertThat(result.getAllFieldStats().get(fieldName).getDisplayType(), equalTo("float")); } public void testFloat() { @@ -169,6 +173,11 @@ public class FieldStatsTests extends ESSingleNodeTestCase { equalTo(java.lang.Long.toString(max))); assertThat(result.getAllFieldStats().get(fieldName).isSearchable(), equalTo(true)); assertThat(result.getAllFieldStats().get(fieldName).isAggregatable(), equalTo(true)); + if (fieldType.equals("float") || fieldType.equals("double") || fieldType.equals("half-float")) { + assertThat(result.getAllFieldStats().get(fieldName).getDisplayType(), equalTo("float")); + } else { + assertThat(result.getAllFieldStats().get(fieldName).getDisplayType(), equalTo("integer")); + } client().admin().indices().prepareDelete("test").get(); client().admin().indices().prepareDelete("test1").get(); @@ -191,6 +200,7 @@ public class FieldStatsTests extends ESSingleNodeTestCase { assertThat(stat.getSumTotalTermFreq(), equalTo(4L)); assertThat(stat.isSearchable(), equalTo(true)); assertThat(stat.isAggregatable(), equalTo(false)); + assertThat(stat.getDisplayType(), equalTo("integer")); } public void testMerge_notAvailable() { @@ -209,6 +219,7 @@ public class FieldStatsTests extends ESSingleNodeTestCase { assertThat(stat.getSumTotalTermFreq(), equalTo(-1L)); assertThat(stat.isSearchable(), equalTo(true)); assertThat(stat.isAggregatable(), equalTo(true)); + assertThat(stat.getDisplayType(), equalTo("integer")); stats.add(new FieldStats.Long(1, -1L, -1L, -1L, true, true, 1L, 1L)); stat = stats.remove(0); @@ -221,6 +232,7 @@ public class FieldStatsTests extends ESSingleNodeTestCase { assertThat(stat.getSumTotalTermFreq(), equalTo(-1L)); assertThat(stat.isSearchable(), equalTo(true)); assertThat(stat.isAggregatable(), equalTo(true)); + assertThat(stat.getDisplayType(), equalTo("integer")); } public void testNumberFiltering() { @@ -350,6 +362,8 @@ public class FieldStatsTests extends ESSingleNodeTestCase { equalTo(dateTime1Str)); assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValueAsString(), equalTo(dateTime2Str)); + assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getDisplayType(), + equalTo("date")); response = client().prepareFieldStats() .setFields("value") @@ -370,6 +384,8 @@ public class FieldStatsTests extends ESSingleNodeTestCase { equalTo(dateTime1.getMillis())); assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getMinValueAsString(), equalTo(dateTime1Str)); + assertThat(response.getIndicesMergedFieldStats().get("test1").get("value").getDisplayType(), + equalTo("date")); response = client().prepareFieldStats() .setFields("value") @@ -402,6 +418,8 @@ public class FieldStatsTests extends ESSingleNodeTestCase { equalTo(dateTime2.getMillis())); assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValueAsString(), equalTo(dateTime2Str)); + assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getDisplayType(), + equalTo("date")); response = client().prepareFieldStats() .setFields("value") @@ -417,6 +435,8 @@ public class FieldStatsTests extends ESSingleNodeTestCase { equalTo(dateTime1Str)); assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValueAsString(), equalTo(dateTime2Str)); + assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getDisplayType(), + equalTo("date")); response = 
client().prepareFieldStats() .setFields("value") @@ -432,6 +452,8 @@ public class FieldStatsTests extends ESSingleNodeTestCase { equalTo(dateTime1Str)); assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValueAsString(), equalTo(dateTime2Str)); + assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getDisplayType(), + equalTo("date")); } public void testDateFiltering_optionalFormat() { @@ -453,6 +475,8 @@ public class FieldStatsTests extends ESSingleNodeTestCase { assertThat(response.getIndicesMergedFieldStats().size(), equalTo(1)); assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getMinValueAsString(), equalTo("2014-01-02T00:00:00.000Z")); + assertThat(response.getIndicesMergedFieldStats().get("test2").get("value").getDisplayType(), + equalTo("date")); try { client().prepareFieldStats() @@ -487,6 +511,6 @@ public class FieldStatsTests extends ESSingleNodeTestCase { .get(); assertThat(response.getAllFieldStats().size(), equalTo(1)); assertThat(response.getAllFieldStats().get("_type").isSearchable(), equalTo(true)); - // assertThat(response.getAllFieldStats().get("_type").isAggregatable(), equalTo(true)); + assertThat(response.getAllFieldStats().get("_type").isAggregatable(), equalTo(true)); } } diff --git a/core/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java b/core/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java index 948f4820439..092e6eaff8a 100644 --- a/core/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/AsyncShardFetchTests.java @@ -24,7 +24,7 @@ import org.elasticsearch.action.support.nodes.BaseNodeResponse; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; @@ -45,11 +45,11 @@ import static org.hamcrest.Matchers.sameInstance; /** */ public class AsyncShardFetchTests extends ESTestCase { - private final DiscoveryNode node1 = new DiscoveryNode("node1", DummyTransportAddress.INSTANCE, Collections.emptyMap(), + private final DiscoveryNode node1 = new DiscoveryNode("node1", LocalTransportAddress.buildUnique(), Collections.emptyMap(), Collections.singleton(DiscoveryNode.Role.DATA), Version.CURRENT); private final Response response1 = new Response(node1); private final Throwable failure1 = new Throwable("simulated failure 1"); - private final DiscoveryNode node2 = new DiscoveryNode("node2", DummyTransportAddress.INSTANCE, Collections.emptyMap(), + private final DiscoveryNode node2 = new DiscoveryNode("node2", LocalTransportAddress.buildUnique(), Collections.emptyMap(), Collections.singleton(DiscoveryNode.Role.DATA), Version.CURRENT); private final Response response2 = new Response(node2); private final Throwable failure2 = new Throwable("simulate failure 2"); @@ -292,7 +292,7 @@ public class AsyncShardFetchTests extends ESTestCase { } else { processAsyncFetch(shardId, Collections.singletonList(entry.response), null); } - } catch (Throwable e) { + } catch (Exception e) { logger.error("unexpected failure", e); } finally { if (entry != null) { diff --git a/core/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java 
b/core/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java index 129495ea15e..7ea916f4a52 100644 --- a/core/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java @@ -24,6 +24,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.Client; import org.elasticsearch.client.Requests; import org.elasticsearch.cluster.ClusterState; @@ -76,9 +77,6 @@ public class GatewayIndexStateIT extends ESIntegTestCase { .field("required", true).endObject().endObject().endObject()) .execute().actionGet(); - logger.info("--> waiting for yellow status"); - ensureYellow(); - logger.info("--> verify meta _routing required exists"); MappingMetaData mappingMd = client().admin().cluster().prepareState().execute().actionGet().getState().metaData() .index("test").mapping("type1"); @@ -205,7 +203,7 @@ public class GatewayIndexStateIT extends ESIntegTestCase { internalCluster().startNode(Settings.builder().put(Node.NODE_DATA_SETTING.getKey(), false).build()); logger.info("--> create an index"); - client().admin().indices().prepareCreate("test").execute().actionGet(); + client().admin().indices().prepareCreate("test").setWaitForActiveShards(ActiveShardCount.NONE).execute().actionGet(); logger.info("--> closing master node"); internalCluster().closeNonSharedNodes(false); @@ -233,9 +231,6 @@ public class GatewayIndexStateIT extends ESIntegTestCase { logger.info("--> create an index"); client().admin().indices().prepareCreate("test").execute().actionGet(); - logger.info("--> waiting for test index to be created"); - ensureYellow(); - client().prepareIndex("test", "type1").setSource("field1", "value1").setTimeout("100ms").execute().actionGet(); } @@ -346,7 +341,7 @@ public class GatewayIndexStateIT extends ESIntegTestCase { logger.info("--> created temp data path for shadow replicas [{}]", dataPath); logger.info("--> starting a cluster with " + numNodes + " nodes"); final Settings nodeSettings = Settings.builder() - .put("node.add_id_to_custom_path", false) + .put("node.add_lock_id_to_custom_path", false) .put(Environment.PATH_SHARED_DATA_SETTING.getKey(), dataPath.toString()) .put("index.store.fs.fs_lock", randomFrom("native", "simple")) .build(); @@ -426,7 +421,7 @@ public class GatewayIndexStateIT extends ESIntegTestCase { // this one is not validated ahead of time and breaks allocation .put("index.analysis.filter.myCollator.type", "icu_collation") ).build(); - IndexMetaData.FORMAT.write(brokenMeta, brokenMeta.getVersion(), services.indexPaths(brokenMeta.getIndex())); + IndexMetaData.FORMAT.write(brokenMeta, services.indexPaths(brokenMeta.getIndex())); } internalCluster().fullRestart(); // ensureGreen(closedIndex) waits for the index to show up in the metadata @@ -483,7 +478,7 @@ public class GatewayIndexStateIT extends ESIntegTestCase { for (NodeEnvironment services : internalCluster().getInstances(NodeEnvironment.class)) { IndexMetaData brokenMeta = IndexMetaData.builder(metaData).settings(metaData.getSettings() .filter((s) -> "index.analysis.analyzer.test.tokenizer".equals(s) == false)).build(); - IndexMetaData.FORMAT.write(brokenMeta, brokenMeta.getVersion(), services.indexPaths(brokenMeta.getIndex())); + IndexMetaData.FORMAT.write(brokenMeta, 
services.indexPaths(brokenMeta.getIndex())); } internalCluster().fullRestart(); // ensureGreen(closedIndex) waits for the index to show up in the metadata @@ -521,7 +516,7 @@ public class GatewayIndexStateIT extends ESIntegTestCase { MetaData brokenMeta = MetaData.builder(metaData).persistentSettings(Settings.builder() .put(metaData.persistentSettings()).put("this.is.unknown", true) .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), "broken").build()).build(); - MetaData.FORMAT.write(brokenMeta, metaData.version(), nodeEnv.nodeDataPaths()); + MetaData.FORMAT.write(brokenMeta, nodeEnv.nodeDataPaths()); } internalCluster().fullRestart(); ensureYellow("test"); // wait for state recovery diff --git a/core/src/test/java/org/elasticsearch/gateway/GatewayServiceTests.java b/core/src/test/java/org/elasticsearch/gateway/GatewayServiceTests.java index 4f65c5fafdd..d2085ab9147 100644 --- a/core/src/test/java/org/elasticsearch/gateway/GatewayServiceTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/GatewayServiceTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.gateway; -import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; @@ -36,11 +35,8 @@ public class GatewayServiceTests extends ESTestCase { ClusterService clusterService = new ClusterService(Settings.builder().put("cluster.name", "GatewayServiceTests").build(), new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), null); - return new GatewayService(Settings.builder() - .put("http.enabled", "false") - .put("discovery.type", "local") - .put(settings.build()).build(), - null, clusterService, null, null, null, null, new NoopDiscovery(), null, null); + return new GatewayService(settings.build(), + null, clusterService, null, null, null, new NoopDiscovery(), null, null); } public void testDefaultRecoverAfterTime() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java b/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java index 41eba406009..4cf505d839a 100644 --- a/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java @@ -104,7 +104,7 @@ public class MetaDataStateFormatTests extends ESTestCase { Format format = new Format(randomFrom(XContentType.values()), "foo-"); DummyState state = new DummyState(randomRealisticUnicodeOfCodepointLengthBetween(1, 1000), randomInt(), randomLong(), randomDouble(), randomBoolean()); int version = between(0, Integer.MAX_VALUE/2); - format.write(state, version, dirs); + format.write(state, dirs); for (Path file : dirs) { Path[] list = content("*", file); assertEquals(list.length, 1); @@ -119,7 +119,7 @@ public class MetaDataStateFormatTests extends ESTestCase { } final int version2 = between(version, Integer.MAX_VALUE); DummyState state2 = new DummyState(randomRealisticUnicodeOfCodepointLengthBetween(1, 1000), randomInt(), randomLong(), randomDouble(), randomBoolean()); - format.write(state2, version2, dirs); + format.write(state2, dirs); for (Path file : dirs) { Path[] list = content("*", file); @@ -146,7 +146,7 @@ public class MetaDataStateFormatTests extends ESTestCase { Format format = new Format(randomFrom(XContentType.values()), "foo-"); DummyState state = new DummyState(randomRealisticUnicodeOfCodepointLengthBetween(1, 
1000), randomInt(), randomLong(), randomDouble(), randomBoolean()); int version = between(0, Integer.MAX_VALUE/2); - format.write(state, version, dirs); + format.write(state, dirs); for (Path file : dirs) { Path[] list = content("*", file); assertEquals(list.length, 1); @@ -170,7 +170,7 @@ public class MetaDataStateFormatTests extends ESTestCase { Format format = new Format(randomFrom(XContentType.values()), "foo-"); DummyState state = new DummyState(randomRealisticUnicodeOfCodepointLengthBetween(1, 1000), randomInt(), randomLong(), randomDouble(), randomBoolean()); int version = between(0, Integer.MAX_VALUE/2); - format.write(state, version, dirs); + format.write(state, dirs); for (Path file : dirs) { Path[] list = content("*", file); assertEquals(list.length, 1); @@ -261,7 +261,7 @@ public class MetaDataStateFormatTests extends ESTestCase { } } for (int j = numLegacy; j < numStates; j++) { - format.write(meta.get(j), j, dirs[i]); + format.write(meta.get(j), dirs[i]); if (randomBoolean() && (j < numStates - 1 || dirs.length > 0 && i != 0)) { // corrupt a file that we do not necessarily need here.... Path file = dirs[i].resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve("global-" + j + ".st"); corruptedFiles.add(file); diff --git a/core/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java b/core/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java index 4999ef5eac5..795046ba10c 100644 --- a/core/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/MetaDataWriteDataNodesIT.java @@ -161,8 +161,8 @@ public class MetaDataWriteDataNodesIT extends ESIntegTestCase { logger.info("checking if meta state exists..."); try { assertTrue("Expecting meta state of index " + indexName + " to be on node " + nodeName, getIndicesMetaDataOnNode(nodeName).containsKey(indexName)); - } catch (Throwable t) { - logger.info("failed to load meta state", t); + } catch (Exception e) { + logger.info("failed to load meta state", e); fail("could not load meta state"); } } diff --git a/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java b/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java index 96e360550af..e86fa6f014b 100644 --- a/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/PrimaryShardAllocatorTests.java @@ -21,10 +21,12 @@ package org.elasticsearch.gateway; import org.apache.lucene.index.CorruptIndexException; import org.elasticsearch.Version; +import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.health.ClusterHealthStatus; +import org.elasticsearch.cluster.health.ClusterStateHealth; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RestoreSource; @@ -41,6 +43,7 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardStateMetaData; import org.elasticsearch.snapshots.Snapshot; +import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.test.ESAllocationTestCase; import org.junit.Before; @@ -50,6 +53,7 @@ import java.util.Map; import static org.hamcrest.Matchers.anyOf; 
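The MetaDataStateFormatTests hunks above all make the same mechanical change: MetaDataStateFormat.write() no longer takes a caller-supplied version. A minimal before/after sketch using the test's own Format and DummyState fixtures (shapes assumed from the hunks; the format presumably tracks state generations internally now):

    Format format = new Format(XContentType.JSON, "foo-");
    DummyState state = new DummyState("payload", randomInt(), randomLong(), randomDouble(), randomBoolean());
    // Before: format.write(state, version, dirs) -- every caller threaded a version through.
    // After: callers just write; generation numbering is the format's concern.
    format.write(state, dirs);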
import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.lessThanOrEqualTo; /** */ @@ -77,6 +81,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { assertThat(changed, equalTo(false)); assertThat(allocation.routingNodes().unassigned().size(), equalTo(1)); assertThat(allocation.routingNodes().unassigned().iterator().next().shardId(), equalTo(shardId)); + assertClusterHealthStatus(allocation, ClusterHealthStatus.YELLOW); } /** @@ -90,9 +95,10 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, Version.V_2_1_0); } boolean changed = testAllocator.allocateUnassigned(allocation); - assertThat(changed, equalTo(false)); + assertThat(changed, equalTo(true)); assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1)); assertThat(allocation.routingNodes().unassigned().ignored().get(0).shardId(), equalTo(shardId)); + assertClusterHealthStatus(allocation, ClusterHealthStatus.YELLOW); } /** @@ -108,9 +114,10 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { } testAllocator.addData(node1, ShardStateMetaData.NO_VERSION, null, randomBoolean()); boolean changed = testAllocator.allocateUnassigned(allocation); - assertThat(changed, equalTo(false)); + assertThat(changed, equalTo(true)); assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1)); assertThat(allocation.routingNodes().unassigned().ignored().get(0).shardId(), equalTo(shardId)); + assertClusterHealthStatus(allocation, ClusterHealthStatus.YELLOW); } /** @@ -120,9 +127,10 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { RoutingAllocation allocation = routingAllocationWithOnePrimaryNoReplicas(yesAllocationDeciders(), false, Version.CURRENT, "id2"); testAllocator.addData(node1, ShardStateMetaData.NO_VERSION, "id1", randomBoolean()); boolean changed = testAllocator.allocateUnassigned(allocation); - assertThat(changed, equalTo(false)); + assertThat(changed, equalTo(true)); assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1)); assertThat(allocation.routingNodes().unassigned().ignored().get(0).shardId(), equalTo(shardId)); + assertClusterHealthStatus(allocation, ClusterHealthStatus.YELLOW); } /** @@ -137,6 +145,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).currentNodeId(), equalTo(node1.getId())); + assertClusterHealthStatus(allocation, ClusterHealthStatus.YELLOW); } /** @@ -152,9 +161,10 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { testAllocator.addData(node1, 3, null, randomBoolean(), new CorruptIndexException("test", "test")); } boolean changed = testAllocator.allocateUnassigned(allocation); - assertThat(changed, equalTo(false)); + assertThat(changed, equalTo(true)); assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1)); assertThat(allocation.routingNodes().unassigned().ignored().get(0).shardId(), equalTo(shardId)); + assertClusterHealthStatus(allocation, ClusterHealthStatus.YELLOW); } /** @@ -179,6 +189,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { // check that allocation id is reused 
assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).allocationId().getId(), equalTo("allocId1")); } + assertClusterHealthStatus(allocation, ClusterHealthStatus.YELLOW); } /** @@ -197,6 +208,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1)); DiscoveryNode allocatedNode = node1HasPrimaryShard ? node1 : node2; assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).currentNodeId(), equalTo(allocatedNode.getId())); + assertClusterHealthStatus(allocation, ClusterHealthStatus.YELLOW); } /** @@ -213,9 +225,10 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { testAllocator.addData(node1, 3, null, randomBoolean()); } boolean changed = testAllocator.allocateUnassigned(allocation); - assertThat(changed, equalTo(false)); + assertThat(changed, equalTo(true)); assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1)); assertThat(allocation.routingNodes().unassigned().ignored().get(0).shardId(), equalTo(shardId)); + assertClusterHealthStatus(allocation, ClusterHealthStatus.YELLOW); } /** @@ -236,6 +249,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).currentNodeId(), equalTo(node1.getId())); + assertClusterHealthStatus(allocation, ClusterHealthStatus.YELLOW); } /** @@ -249,6 +263,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).currentNodeId(), equalTo(node2.getId())); + assertClusterHealthStatus(allocation, ClusterHealthStatus.YELLOW); } /** @@ -268,6 +283,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).currentNodeId(), equalTo(node2.getId())); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).allocationId().getId(), equalTo("some allocId")); + assertClusterHealthStatus(allocation, ClusterHealthStatus.YELLOW); } /** @@ -285,6 +301,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { assertThat(changed, equalTo(true)); assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1)); + assertClusterHealthStatus(allocation, ClusterHealthStatus.YELLOW); } /** @@ -299,8 +316,9 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { RoutingAllocation allocation = getRestoreRoutingAllocation(throttleAllocationDeciders(), clusterHasActiveAllocationIds); testAllocator.addData(node1, legacyVersion, allocationId, randomBoolean()); boolean changed = testAllocator.allocateUnassigned(allocation); - assertThat(changed, 
equalTo(false)); + assertThat(changed, equalTo(true)); assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(false)); + assertClusterHealthStatus(allocation, ClusterHealthStatus.YELLOW); } /** @@ -318,6 +336,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { assertThat(changed, equalTo(true)); assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1)); + assertClusterHealthStatus(allocation, ClusterHealthStatus.YELLOW); } /** @@ -331,6 +350,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { assertThat(changed, equalTo(false)); assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); assertThat(allocation.routingNodes().unassigned().size(), equalTo(1)); + assertClusterHealthStatus(allocation, ClusterHealthStatus.YELLOW); } private RoutingAllocation getRestoreRoutingAllocation(AllocationDeciders allocationDeciders, boolean hasActiveAllocation) { @@ -365,6 +385,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { assertThat(changed, equalTo(true)); assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1)); + assertClusterHealthStatus(allocation, ClusterHealthStatus.RED); } /** @@ -378,8 +399,9 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { RoutingAllocation allocation = getRecoverOnAnyNodeRoutingAllocation(throttleAllocationDeciders(), hasActiveAllocation); testAllocator.addData(node1, legacyVersion, allocationId, randomBoolean()); boolean changed = testAllocator.allocateUnassigned(allocation); - assertThat(changed, equalTo(false)); + assertThat(changed, equalTo(true)); assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(false)); + assertClusterHealthStatus(allocation, ClusterHealthStatus.YELLOW); } /** @@ -396,6 +418,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { assertThat(changed, equalTo(true)); assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1)); + assertClusterHealthStatus(allocation, ClusterHealthStatus.RED); } /** @@ -409,6 +432,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { assertThat(changed, equalTo(false)); assertThat(allocation.routingNodes().unassigned().ignored().isEmpty(), equalTo(true)); assertThat(allocation.routingNodes().unassigned().size(), equalTo(1)); + assertClusterHealthStatus(allocation, ClusterHealthStatus.YELLOW); } private RoutingAllocation getRecoverOnAnyNodeRoutingAllocation(AllocationDeciders allocationDeciders, boolean hasActiveAllocation) { @@ -448,18 +472,20 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { RoutingAllocation allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state, null, System.nanoTime(), false); boolean changed = testAllocator.allocateUnassigned(allocation); - assertThat(changed, equalTo(false)); + assertThat(changed, equalTo(true)); assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1)); assertThat(allocation.routingNodes().unassigned().ignored().get(0).shardId(), equalTo(shardId)); 
assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(2)); // replicas + assertClusterHealthStatus(allocation, ClusterHealthStatus.RED); testAllocator.addData(node1, 1, null, randomBoolean()); allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state, null, System.nanoTime(), false); changed = testAllocator.allocateUnassigned(allocation); - assertThat(changed, equalTo(false)); + assertThat(changed, equalTo(true)); assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1)); assertThat(allocation.routingNodes().unassigned().ignored().get(0).shardId(), equalTo(shardId)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(2)); // replicas + assertClusterHealthStatus(allocation, ClusterHealthStatus.RED); testAllocator.addData(node2, 1, null, randomBoolean()); allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state, null, System.nanoTime(), false); @@ -469,6 +495,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(2)); // replicas assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).currentNodeId(), anyOf(equalTo(node2.getId()), equalTo(node1.getId()))); + assertClusterHealthStatus(allocation, ClusterHealthStatus.RED); } /** @@ -489,18 +516,20 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { RoutingAllocation allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state, null, System.nanoTime(), false); boolean changed = testAllocator.allocateUnassigned(allocation); - assertThat(changed, equalTo(false)); + assertThat(changed, equalTo(true)); assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1)); assertThat(allocation.routingNodes().unassigned().ignored().get(0).shardId(), equalTo(shardId)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(2)); // replicas + assertClusterHealthStatus(allocation, ClusterHealthStatus.RED); testAllocator.addData(node1, 1, null, randomBoolean()); allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state, null, System.nanoTime(), false); changed = testAllocator.allocateUnassigned(allocation); - assertThat(changed, equalTo(false)); + assertThat(changed, equalTo(true)); assertThat(allocation.routingNodes().unassigned().ignored().size(), equalTo(1)); assertThat(allocation.routingNodes().unassigned().ignored().get(0).shardId(), equalTo(shardId)); assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(2)); // replicas + assertClusterHealthStatus(allocation, ClusterHealthStatus.RED); testAllocator.addData(node2, 2, null, randomBoolean()); allocation = new RoutingAllocation(yesAllocationDeciders(), new RoutingNodes(state, false), state, null, System.nanoTime(), false); @@ -510,6 +539,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.UNASSIGNED).size(), equalTo(2)); // replicas assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).size(), equalTo(1)); 
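The assertClusterHealthStatus checks threaded through these allocator tests (the helper itself appears just below) reduce the RoutingAllocation to a ClusterHealthStatus. As a rough mental model of the green/yellow/red semantics — a simplified standalone sketch, not the actual ClusterStateHealth computation:

    enum Health { GREEN, YELLOW, RED }

    static Health healthOf(boolean primaryUnavailable, boolean replicaUnassigned) {
        if (primaryUnavailable) {
            return Health.RED;    // some data cannot be served at all
        }
        if (replicaUnassigned) {
            return Health.YELLOW; // data is available but redundancy is reduced
        }
        return Health.GREEN;      // everything is allocated and active
    }

Note that the helper below asserts with lessThanOrEqualTo on the status ordinal, so it tolerates a healthier-than-expected state: a primary that has simply not been attempted yet may legitimately report YELLOW rather than RED.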
assertThat(allocation.routingNodes().shardsWithState(ShardRoutingState.INITIALIZING).get(0).currentNodeId(), equalTo(node2.getId())); + assertClusterHealthStatus(allocation, ClusterHealthStatus.RED); } private RoutingAllocation routingAllocationWithOnePrimaryNoReplicas(AllocationDeciders deciders, boolean asNew, Version version, String... activeAllocationIds) { @@ -530,6 +560,19 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { return new RoutingAllocation(deciders, new RoutingNodes(state, false), state, null, System.nanoTime(), false); } + private void assertClusterHealthStatus(RoutingAllocation allocation, ClusterHealthStatus expectedStatus) { + RoutingTable oldRoutingTable = allocation.routingTable(); + RoutingNodes newRoutingNodes = allocation.routingNodes(); + final RoutingTable newRoutingTable = new RoutingTable.Builder() + .updateNodes(oldRoutingTable.version(), newRoutingNodes) + .build(); + ClusterState clusterState = ClusterState.builder(new ClusterName("test-cluster")) + .routingTable(newRoutingTable) + .build(); + ClusterStateHealth clusterStateHealth = new ClusterStateHealth(clusterState); + assertThat(clusterStateHealth.getStatus().ordinal(), lessThanOrEqualTo(expectedStatus.ordinal())); + } + class TestAllocator extends PrimaryShardAllocator { private Map data; @@ -547,7 +590,7 @@ public class PrimaryShardAllocatorTests extends ESAllocationTestCase { return addData(node, version, allocationId, primary, null); } - public TestAllocator addData(DiscoveryNode node, long version, String allocationId, boolean primary, @Nullable Throwable storeException) { + public TestAllocator addData(DiscoveryNode node, long version, String allocationId, boolean primary, @Nullable Exception storeException) { if (data == null) { data = new HashMap<>(); } diff --git a/core/src/test/java/org/elasticsearch/gateway/RecoverAfterNodesIT.java b/core/src/test/java/org/elasticsearch/gateway/RecoverAfterNodesIT.java index d6e8d61a7a6..1e35bcdd469 100644 --- a/core/src/test/java/org/elasticsearch/gateway/RecoverAfterNodesIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/RecoverAfterNodesIT.java @@ -36,7 +36,7 @@ import static org.hamcrest.Matchers.hasItem; @ClusterScope(scope = Scope.TEST, numDataNodes = 0) public class RecoverAfterNodesIT extends ESIntegTestCase { - private final static TimeValue BLOCK_WAIT_TIMEOUT = TimeValue.timeValueSeconds(10); + private static final TimeValue BLOCK_WAIT_TIMEOUT = TimeValue.timeValueSeconds(10); public Set waitForNoBlocksOnNode(TimeValue timeout, Client nodeClient) throws InterruptedException { long start = System.currentTimeMillis(); diff --git a/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java b/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java index b417553a609..dedd26d68b5 100644 --- a/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java @@ -299,7 +299,7 @@ public class ReplicaShardAllocatorTests extends ESAllocationTestCase { .addShard(primaryShard) .addShard(ShardRouting.newUnassigned(shardId, null, false, new UnassignedInfo(reason, null, null, failedAllocations, System.nanoTime(), - System.currentTimeMillis(), delayed))) + System.currentTimeMillis(), delayed, UnassignedInfo.AllocationStatus.NO_ATTEMPT))) .build()) ) .build(); diff --git a/core/src/test/java/org/elasticsearch/http/HttpServerTests.java b/core/src/test/java/org/elasticsearch/http/HttpServerTests.java index 
2ba7da84c14..f9de466e978 100644 --- a/core/src/test/java/org/elasticsearch/http/HttpServerTests.java +++ b/core/src/test/java/org/elasticsearch/http/HttpServerTests.java @@ -18,9 +18,10 @@ */ package org.elasticsearch.http; -import org.elasticsearch.cluster.service.ClusterService; +import java.util.Collections; +import java.util.Map; + import org.elasticsearch.common.breaker.CircuitBreaker; -import org.elasticsearch.common.bytes.ByteBufferBytesReference; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.component.AbstractLifecycleComponent; @@ -33,7 +34,6 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; -import org.elasticsearch.node.service.NodeService; import org.elasticsearch.rest.AbstractRestChannel; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestController; @@ -43,10 +43,6 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; import org.junit.Before; -import java.nio.ByteBuffer; -import java.nio.charset.StandardCharsets; -import java.util.Map; - public class HttpServerTests extends ESTestCase { private static final ByteSizeValue BREAKER_LIMIT = new ByteSizeValue(20); private HttpServer httpServer; @@ -64,19 +60,15 @@ public class HttpServerTests extends ESTestCase { inFlightRequestsBreaker = circuitBreakerService.getBreaker(CircuitBreaker.IN_FLIGHT_REQUESTS); HttpServerTransport httpServerTransport = new TestHttpServerTransport(); - RestController restController = new RestController(settings); + RestController restController = new RestController(settings, Collections.emptySet()); restController.registerHandler(RestRequest.Method.GET, "/", - (request, channel) -> channel.sendResponse( + (request, channel, client) -> channel.sendResponse( new BytesRestResponse(RestStatus.OK, BytesRestResponse.TEXT_CONTENT_TYPE, BytesArray.EMPTY))); - restController.registerHandler(RestRequest.Method.GET, "/error", (request, channel) -> { + restController.registerHandler(RestRequest.Method.GET, "/error", (request, channel, client) -> { throw new IllegalArgumentException("test error"); }); - ClusterService clusterService = new ClusterService(Settings.EMPTY, - new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), null); - NodeService nodeService = new NodeService(Settings.EMPTY, null, null, null, null, null, null, null, null, - clusterService, null); - httpServer = new HttpServer(settings, httpServerTransport, restController, nodeService, circuitBreakerService); + httpServer = new HttpServer(settings, httpServerTransport, restController, null, circuitBreakerService); httpServer.start(); } @@ -129,7 +121,7 @@ public class HttpServerTests extends ESTestCase { assertEquals(0, inFlightRequestsBreaker.getUsed()); } - private static final class TestHttpServerTransport extends AbstractLifecycleComponent<HttpServerTransport> implements + private static final class TestHttpServerTransport extends AbstractLifecycleComponent implements HttpServerTransport { public TestHttpServerTransport() { @@ -202,7 +194,7 @@ public class HttpServerTests extends ESTestCase { private TestRestRequest(String path, String content) { this.path = path; - this.content = new ByteBufferBytesReference(ByteBuffer.wrap(content.getBytes(StandardCharsets.UTF_8))); + this.content = new
BytesArray(content); } @Override diff --git a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java index 2769534aee0..91e9bb2c016 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java @@ -193,7 +193,6 @@ public class IndexModuleTests extends ESTestCase { IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, settings); IndexModule module = new IndexModule(indexSettings, null, new AnalysisRegistry(environment, emptyMap(), emptyMap(), emptyMap(), emptyMap())); - Consumer listener = (s) -> {}; module.addIndexEventListener(eventListener); IndexService indexService = module.newIndexService(nodeEnvironment, deleter, nodeServicesProvider, indicesQueryCache, mapperRegistry, new IndicesFieldDataCache(settings, this.listener)); diff --git a/core/src/test/java/org/elasticsearch/index/IndexRequestBuilderIT.java b/core/src/test/java/org/elasticsearch/index/IndexRequestBuilderIT.java index c41051ec59c..84a9fe2970b 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexRequestBuilderIT.java +++ b/core/src/test/java/org/elasticsearch/index/IndexRequestBuilderIT.java @@ -22,6 +22,7 @@ package org.elasticsearch.index; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; @@ -35,7 +36,6 @@ import static org.hamcrest.Matchers.containsString; public class IndexRequestBuilderIT extends ESIntegTestCase { public void testSetSource() throws InterruptedException, ExecutionException { createIndex("test"); - ensureYellow(); Map map = new HashMap<>(); map.put("test_field", "foobar"); IndexRequestBuilder[] builders = new IndexRequestBuilder[] { @@ -43,7 +43,7 @@ public class IndexRequestBuilderIT extends ESIntegTestCase { client().prepareIndex("test", "test").setSource("{\"test_field\" : \"foobar\"}"), client().prepareIndex("test", "test").setSource(new BytesArray("{\"test_field\" : \"foobar\"}")), client().prepareIndex("test", "test").setSource(new BytesArray("{\"test_field\" : \"foobar\"}")), - client().prepareIndex("test", "test").setSource(new BytesArray("{\"test_field\" : \"foobar\"}").toBytes()), + client().prepareIndex("test", "test").setSource(BytesReference.toBytes(new BytesArray("{\"test_field\" : \"foobar\"}"))), client().prepareIndex("test", "test").setSource(map) }; indexRandom(true, builders); diff --git a/core/src/test/java/org/elasticsearch/index/IndexServiceTests.java b/core/src/test/java/org/elasticsearch/index/IndexServiceTests.java index 97258b12a3b..22324e1ff2b 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexServiceTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexServiceTests.java @@ -77,7 +77,6 @@ public class IndexServiceTests extends ESSingleNodeTestCase { public void testFilteringAliases() throws Exception { IndexService indexService = createIndex("test", Settings.EMPTY); - IndexShard shard = indexService.getShard(0); add(indexService, "cats", filter(termQuery("animal", "cat"))); add(indexService, "dogs", filter(termQuery("animal", "dog"))); add(indexService, "all", null); @@ -101,7 +100,6 @@ public class IndexServiceTests extends 
ESSingleNodeTestCase { public void testAliasFilters() throws Exception { IndexService indexService = createIndex("test", Settings.EMPTY); - IndexShard shard = indexService.getShard(0); add(indexService, "cats", filter(termQuery("animal", "cat"))); add(indexService, "dogs", filter(termQuery("animal", "dog"))); @@ -118,7 +116,6 @@ public class IndexServiceTests extends ESSingleNodeTestCase { public void testRemovedAliasFilter() throws Exception { IndexService indexService = createIndex("test", Settings.EMPTY); - IndexShard shard = indexService.getShard(0); add(indexService, "cats", filter(termQuery("animal", "cat"))); remove(indexService, "cats"); @@ -132,7 +129,6 @@ public class IndexServiceTests extends ESSingleNodeTestCase { public void testUnknownAliasFilter() throws Exception { IndexService indexService = createIndex("test", Settings.EMPTY); - IndexShard shard = indexService.getShard(0); add(indexService, "cats", filter(termQuery("animal", "cat"))); add(indexService, "dogs", filter(termQuery("animal", "dog"))); diff --git a/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java b/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java index f0e12abeac8..9f2ec74329b 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java +++ b/core/src/test/java/org/elasticsearch/index/IndexWithShadowReplicasIT.java @@ -94,7 +94,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase { private Settings nodeSettings(String dataPath) { return Settings.builder() - .put(NodeEnvironment.ADD_NODE_ID_TO_CUSTOM_PATH.getKey(), false) + .put(NodeEnvironment.ADD_NODE_LOCK_ID_TO_CUSTOM_PATH.getKey(), false) .put(Environment.PATH_SHARED_DATA_SETTING.getKey(), dataPath) .put(FsDirectoryService.INDEX_LOCK_FACTOR_SETTING.getKey(), randomFrom("native", "simple")) .build(); @@ -277,7 +277,6 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase { .build(); prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=text").get(); - ensureYellow(IDX); client().prepareIndex(IDX, "doc", "1").setSource("foo", "bar").get(); client().prepareIndex(IDX, "doc", "2").setSource("foo", "bar").get(); @@ -335,7 +334,6 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase { .build(); prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=text").get(); - ensureYellow(IDX); client().prepareIndex(IDX, "doc", "1").setSource("foo", "bar").get(); client().prepareIndex(IDX, "doc", "2").setSource("foo", "bar").get(); @@ -379,7 +377,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase { assertThat(gResp2.getField("foo").getValue().toString(), equalTo("bar")); } - public void testPrimaryRelocationWithConcurrentIndexing() throws Throwable { + public void testPrimaryRelocationWithConcurrentIndexing() throws Exception { Path dataPath = createTempDir(); Settings nodeSettings = nodeSettings(dataPath); @@ -395,7 +393,6 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase { .build(); prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=text").get(); - ensureYellow(IDX); // Node1 has the primary, now node2 has the replica String node2 = internalCluster().startNode(nodeSettings); ensureGreen(IDX); @@ -408,7 +405,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase { final int numPhase2Docs = scaledRandomIntBetween(25, 200); final CountDownLatch phase1finished = new CountDownLatch(1); final CountDownLatch phase2finished = new CountDownLatch(1); - final 
CopyOnWriteArrayList<Throwable> exceptions = new CopyOnWriteArrayList<>(); + final CopyOnWriteArrayList<Exception> exceptions = new CopyOnWriteArrayList<>(); Thread thread = new Thread() { @Override public void run() { @@ -418,8 +415,8 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase { final IndexResponse indexResponse = client().prepareIndex(IDX, "doc", Integer.toString(counter.incrementAndGet())).setSource("foo", "bar").get(); assertTrue(indexResponse.isCreated()); - } catch (Throwable t) { - exceptions.add(t); + } catch (Exception e) { + exceptions.add(e); } final int docCount = counter.get(); if (docCount == numPhase1Docs) { @@ -454,7 +451,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase { public void testPrimaryRelocationWhereRecoveryFails() throws Exception { Path dataPath = createTempDir(); Settings nodeSettings = Settings.builder() - .put("node.add_id_to_custom_path", false) + .put("node.add_lock_id_to_custom_path", false) .put(Environment.PATH_SHARED_DATA_SETTING.getKey(), dataPath) .build(); @@ -470,7 +467,6 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase { .build(); prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=text").get(); - ensureYellow(IDX); // Node1 has the primary, now node2 has the replica String node2 = internalCluster().startNode(nodeSettings); ensureGreen(IDX); @@ -677,7 +673,7 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase { client().prepareIndex(IDX, "doc", "4").setSource("foo", "eggplant").get(); flushAndRefresh(IDX); - SearchResponse resp = client().prepareSearch(IDX).setQuery(matchAllQuery()).addFieldDataField("foo").addSort("foo", SortOrder.ASC).get(); + SearchResponse resp = client().prepareSearch(IDX).setQuery(matchAllQuery()).addDocValueField("foo").addSort("foo", SortOrder.ASC).get(); assertHitCount(resp, 4); assertOrderedSearchHits(resp, "2", "3", "4", "1"); SearchHit[] hits = resp.getHits().hits(); @@ -859,7 +855,6 @@ public class IndexWithShadowReplicasIT extends ESIntegTestCase { .build(); prepareCreate(IDX).setSettings(idxSettings).addMapping("doc", "foo", "type=text").get(); - ensureYellow(IDX); client().prepareIndex(IDX, "doc", "1").setSource("foo", "bar").get(); client().prepareIndex(IDX, "doc", "2").setSource("foo", "bar").get(); diff --git a/core/src/test/java/org/elasticsearch/index/SettingsListenerIT.java b/core/src/test/java/org/elasticsearch/index/SettingsListenerIT.java index 000d2509ea8..4f16be6f541 100644 --- a/core/src/test/java/org/elasticsearch/index/SettingsListenerIT.java +++ b/core/src/test/java/org/elasticsearch/index/SettingsListenerIT.java @@ -45,8 +45,6 @@ public class SettingsListenerIT extends ESIntegTestCase { public static class SettingsListenerPlugin extends Plugin { private final SettingsTestingService service = new SettingsTestingService(); - private static final Setting<Integer> SETTING = Setting.intSetting("index.test.new.setting", 0, - Property.Dynamic, Property.IndexScope); @Override public List<Setting<?>> getSettings() { @@ -62,7 +60,7 @@ public class SettingsListenerIT extends ESIntegTestCase { } @Override - public Collection<Module> nodeModules() { + public Collection<Module> createGuiceModules() { return Collections.singletonList(new SettingsListenerModule(service)); } }
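The SettingsListenerIT hunk above tracks the plugin API rename from nodeModules() to createGuiceModules(). A minimal sketch of the renamed hook outside the test, assuming the 5.x plugin API shown in this diff; the plugin class name and the no-op module are illustrative, not part of this change:

import java.util.Collection;
import java.util.Collections;

import org.elasticsearch.common.inject.Module;
import org.elasticsearch.plugins.Plugin;

// Hypothetical plugin showing the renamed extension point: Guice modules are
// now contributed through createGuiceModules() instead of nodeModules().
public class ExamplePlugin extends Plugin {
    @Override
    public Collection<Module> createGuiceModules() {
        // Module only requires configure(Binder), so an empty lambda is a valid no-op module;
        // a real plugin would return something like the SettingsListenerModule above.
        return Collections.singletonList(binder -> {});
    }
}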
diff --git a/core/src/test/java/org/elasticsearch/index/WaitUntilRefreshIT.java b/core/src/test/java/org/elasticsearch/index/WaitUntilRefreshIT.java index a0a51d01e03..959aba4c520 100644 --- a/core/src/test/java/org/elasticsearch/index/WaitUntilRefreshIT.java +++ b/core/src/test/java/org/elasticsearch/index/WaitUntilRefreshIT.java @@ -58,10 +58,6 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSear * Tests that requests with RefreshPolicy.WAIT_UNTIL will be visible when they return. */ public class WaitUntilRefreshIT extends ESIntegTestCase { - @Override - protected Settings nodeSettings(int nodeOrdinal) { - return Settings.builder().put(super.nodeSettings(nodeOrdinal)).put(NetworkModule.HTTP_ENABLED.getKey(), true).build(); - } @Override public Settings indexSettings() { diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index a0396b7abc6..a3fd266f603 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -297,7 +297,7 @@ public class InternalEngineTests extends ESTestCase { } Engine.EventListener listener = new Engine.EventListener() { @Override - public void onFailedEngine(String reason, @Nullable Throwable t) { + public void onFailedEngine(String reason, @Nullable Exception e) { // we don't need to notify anybody in this test } }; @@ -539,7 +539,7 @@ public class InternalEngineTests extends ESTestCase { public void testCommitStats() { Document document = testDocumentWithTextField(); - document.add(new Field(SourceFieldMapper.NAME, B_1.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE)); + document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE)); ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null); engine.index(new Engine.Index(newUid("1"), doc)); @@ -716,7 +716,7 @@ public class InternalEngineTests extends ESTestCase { // create a document Document document = testDocumentWithTextField(); - document.add(new Field(SourceFieldMapper.NAME, B_1.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE)); + document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE)); ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null); engine.index(new Engine.Index(newUid("1"), doc)); @@ -729,7 +729,7 @@ public class InternalEngineTests extends ESTestCase { // but, we can still get it (in realtime) Engine.GetResult getResult = engine.get(new Engine.Get(true, newUid("1"))); assertThat(getResult.exists(), equalTo(true)); - assertThat(getResult.source().source.toBytesArray(), equalTo(B_1.toBytesArray())); + assertThat(getResult.source().source, equalTo(B_1)); assertThat(getResult.docIdAndVersion(), nullValue()); getResult.release(); @@ -755,7 +755,7 @@ public class InternalEngineTests extends ESTestCase { // now do an update document = testDocument(); document.add(new TextField("value", "test1", Field.Store.YES)); - document.add(new Field(SourceFieldMapper.NAME, B_2.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE)); + document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_2), SourceFieldMapper.Defaults.FIELD_TYPE)); doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_2, null); engine.index(new Engine.Index(newUid("1"), doc)); @@ -769,7 +769,7 @@ public class InternalEngineTests extends ESTestCase { // but, we can still get it (in realtime) getResult = engine.get(new Engine.Get(true, newUid("1"))); assertThat(getResult.exists(), equalTo(true)); -
assertThat(getResult.source().source.toBytesArray(), equalTo(B_2.toBytesArray())); + assertThat(getResult.source().source, equalTo(B_2)); assertThat(getResult.docIdAndVersion(), nullValue()); getResult.release(); @@ -808,7 +808,7 @@ public class InternalEngineTests extends ESTestCase { // add it back document = testDocumentWithTextField(); - document.add(new Field(SourceFieldMapper.NAME, B_1.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE)); + document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE)); doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null); engine.index(new Engine.Index(newUid("1"), doc, Versions.MATCH_DELETED)); @@ -2092,7 +2092,7 @@ public class InternalEngineTests extends ESTestCase { } public void testShardNotAvailableExceptionWhenEngineClosedConcurrently() throws IOException, InterruptedException { - AtomicReference<Throwable> throwable = new AtomicReference<>(); + AtomicReference<Exception> exception = new AtomicReference<>(); String operation = randomFrom("optimize", "refresh", "flush"); Thread mergeThread = new Thread() { @Override @@ -2115,8 +2115,8 @@ break; } } - } catch (Throwable t) { - throwable.set(t); + } catch (Exception e) { + exception.set(e); stop = true; } } @@ -2125,8 +2125,8 @@ mergeThread.start(); engine.close(); mergeThread.join(); - logger.info("exception caught: ", throwable.get()); - assertTrue("expected an Exception that signals shard is not available", TransportActions.isShardNotAvailableException(throwable.get())); + logger.info("exception caught: ", exception.get()); + assertTrue("expected an Exception that signals shard is not available", TransportActions.isShardNotAvailableException(exception.get())); } public void testCurrentTranslogIDisCommitted() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java index ef443d1e102..672686926bd 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java @@ -242,7 +242,7 @@ public class ShadowEngineTests extends ESTestCase { } Engine.EventListener eventListener = new Engine.EventListener() { @Override - public void onFailedEngine(String reason, @Nullable Throwable t) { + public void onFailedEngine(String reason, @Nullable Exception e) { // we don't need to notify anybody in this test } }; @@ -500,7 +500,7 @@ public class ShadowEngineTests extends ESTestCase { public void testShadowEngineIgnoresWriteOperations() throws Exception { // create a document ParseContext.Document document = testDocumentWithTextField(); - document.add(new Field(SourceFieldMapper.NAME, B_1.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE)); + document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE)); ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null); try { replicaEngine.index(new Engine.Index(newUid("1"), doc)); @@ -538,7 +538,7 @@ public class ShadowEngineTests extends ESTestCase { // Now, add a document to the primary so we can test shadow engine deletes document = testDocumentWithTextField(); - document.add(new Field(SourceFieldMapper.NAME, B_1.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE)); + document.add(new
Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE)); doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null); primaryEngine.index(new Engine.Index(newUid("1"), doc)); primaryEngine.flush(); @@ -593,7 +593,7 @@ public class ShadowEngineTests extends ESTestCase { // create a document ParseContext.Document document = testDocumentWithTextField(); - document.add(new Field(SourceFieldMapper.NAME, B_1.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE)); + document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE)); ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null); primaryEngine.index(new Engine.Index(newUid("1"), doc)); @@ -612,7 +612,7 @@ public class ShadowEngineTests extends ESTestCase { // but, we can still get it (in realtime) Engine.GetResult getResult = primaryEngine.get(new Engine.Get(true, newUid("1"))); assertThat(getResult.exists(), equalTo(true)); - assertThat(getResult.source().source.toBytesArray(), equalTo(B_1.toBytesArray())); + assertThat(getResult.source().source, equalTo(B_1)); assertThat(getResult.docIdAndVersion(), nullValue()); getResult.release(); @@ -649,7 +649,7 @@ public class ShadowEngineTests extends ESTestCase { // now do an update document = testDocument(); document.add(new TextField("value", "test1", Field.Store.YES)); - document.add(new Field(SourceFieldMapper.NAME, B_2.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE)); + document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_2), SourceFieldMapper.Defaults.FIELD_TYPE)); doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_2, null); primaryEngine.index(new Engine.Index(newUid("1"), doc)); @@ -663,7 +663,7 @@ public class ShadowEngineTests extends ESTestCase { // but, we can still get it (in realtime) getResult = primaryEngine.get(new Engine.Get(true, newUid("1"))); assertThat(getResult.exists(), equalTo(true)); - assertThat(getResult.source().source.toBytesArray(), equalTo(B_2.toBytesArray())); + assertThat(getResult.source().source, equalTo(B_2)); assertThat(getResult.docIdAndVersion(), nullValue()); getResult.release(); @@ -720,7 +720,7 @@ public class ShadowEngineTests extends ESTestCase { // add it back document = testDocumentWithTextField(); - document.add(new Field(SourceFieldMapper.NAME, B_1.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE)); + document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE)); doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null); primaryEngine.index(new Engine.Index(newUid("1"), doc)); @@ -971,7 +971,7 @@ public class ShadowEngineTests extends ESTestCase { // create a document ParseContext.Document document = testDocumentWithTextField(); - document.add(new Field(SourceFieldMapper.NAME, B_1.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE)); + document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE)); ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null); pEngine.index(new Engine.Index(newUid("1"), doc)); pEngine.flush(true, true); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java index f9fb5e77b70..9a8815e9398 100644 --- 
a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingDisabledTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.mapper; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; @@ -116,7 +115,7 @@ public class DynamicMappingDisabledTests extends ESSingleNodeTestCase { } @Override - public void onFailure(Throwable e) { + public void onFailure(Exception e) { onFailureCalled.set(true); assertThat(e, instanceOf(IndexNotFoundException.class)); assertEquals(e.getMessage(), "no such index"); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingIntegrationIT.java b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingIT.java similarity index 76% rename from core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingIntegrationIT.java rename to core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingIT.java index 8afdea27451..70d7daef717 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingIT.java @@ -18,10 +18,13 @@ */ package org.elasticsearch.index.mapper; +import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.common.collect.ImmutableOpenMap; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.indices.TypeMissingException; import org.elasticsearch.test.ESIntegTestCase; import java.io.IOException; @@ -29,8 +32,9 @@ import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicReference; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -public class DynamicMappingIntegrationIT extends ESIntegTestCase { +public class DynamicMappingIT extends ESIntegTestCase { public void testConflictingDynamicMappings() { // we don't use indexRandom because the order of requests is important here @@ -96,8 +100,8 @@ public class DynamicMappingIntegrationIT extends ESIntegTestCase { try { startLatch.await(); assertTrue(client().prepareIndex("index", "type", id).setSource("field" + id, "bar").get().isCreated()); - } catch (Throwable t) { - error.compareAndSet(null, t); + } catch (Exception e) { + error.compareAndSet(null, e); } } }); @@ -120,4 +124,26 @@ public class DynamicMappingIntegrationIT extends ESIntegTestCase { } } + public void testAutoCreateWithDisabledDynamicMappings() throws Exception { + assertAcked(client().admin().indices().preparePutTemplate("my_template") + .setCreate(true) + .setTemplate("index_*") + .addMapping("foo", "field", "type=keyword") + .setSettings(Settings.builder().put("index.mapper.dynamic", false).build()) + .get()); + + // succeeds since 'foo' has an explicit mapping in the template + indexRandom(true, false, client().prepareIndex("index_1", "foo", "1").setSource("field", "abc")); + + // fails since 'bar' does not have an explicit mapping in the template and dynamic template creation is disabled + TypeMissingException e1 = expectThrows(TypeMissingException.class, + () -> client().prepareIndex("index_2", "bar", "1").setSource("field", "abc").get()); + 
assertEquals("type[bar] missing", e1.getMessage()); + assertEquals("trying to auto create mapping, but dynamic mapping is disabled", e1.getCause().getMessage()); + + // make sure no mappings were created for bar + GetIndexResponse getIndexResponse = client().admin().indices().prepareGetIndex().addIndices("index_2").get(); + assertFalse(getIndexResponse.mappings().containsKey("bar")); + } + } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java index 31b65e84aa0..0de3c5d4a30 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.mapper.object.DynamicTemplate; +import org.elasticsearch.index.mapper.object.DynamicTemplate.XContentFieldType; import org.elasticsearch.test.ESTestCase; import java.util.Collections; @@ -49,6 +50,31 @@ public class DynamicTemplateTests extends ESTestCase { assertEquals("{\"match_mapping_type\":\"string\",\"mapping\":{\"store\":true}}", builder.string()); } + public void testParseUnknownMatchType() { + Map templateDef = new HashMap<>(); + templateDef.put("match_mapping_type", "short"); + templateDef.put("mapping", Collections.singletonMap("store", true)); + // if a wrong match type is specified, we ignore the template + assertNull(DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0_alpha5)); + } + + public void testMatchAllTemplate() { + Map templateDef = new HashMap<>(); + templateDef.put("match_mapping_type", "*"); + templateDef.put("mapping", Collections.singletonMap("store", true)); + DynamicTemplate template = DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0_alpha5); + assertTrue(template.match("a.b", "b", randomFrom(XContentFieldType.values()))); + } + + public void testMatchTypeTemplate() { + Map templateDef = new HashMap<>(); + templateDef.put("match_mapping_type", "string"); + templateDef.put("mapping", Collections.singletonMap("store", true)); + DynamicTemplate template = DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0_alpha5); + assertTrue(template.match("a.b", "b", XContentFieldType.STRING)); + assertFalse(template.match("a.b", "b", XContentFieldType.BOOLEAN)); + } + public void testSerialization() throws Exception { // type-based template Map templateDef = new HashMap<>(); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java b/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java index 68e59527982..37d0436c9db 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java @@ -32,7 +32,7 @@ import java.util.List; public abstract class FieldTypeTestCase extends ESTestCase { /** Abstraction for mutating a property of a MappedFieldType */ - public static abstract class Modifier { + public abstract static class Modifier { /** The name of the property that is being modified. Used in test failure messages. 
*/ public final String property; /** true if this modifier only makes types incompatible in strict mode, false otherwise */ diff --git a/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java b/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java index c93c181f860..2afeb02499d 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java @@ -103,7 +103,7 @@ public class MapperServiceTests extends ESSingleNodeTestCase { // 2. already existing index IndexService indexService = createIndex("index2"); - expectThrows(ExecutionException.class, () -> { + e = expectThrows(ExecutionException.class, () -> { client().prepareIndex("index1", MapperService.DEFAULT_MAPPING, "2").setSource().execute().get(); }); throwable = ExceptionsHelper.unwrapCause(e.getCause()); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java index 165b49d3145..817dc6e50df 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java @@ -24,6 +24,7 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -292,17 +293,17 @@ public class SimpleAllMapperTests extends ESSingleNodeTestCase { } DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); - String mapping = mappingBuilder.endObject().endObject().bytes().toUtf8(); + String mapping = mappingBuilder.endObject().endObject().bytes().utf8ToString(); logger.info("Mapping: {}", mapping); DocumentMapper docMapper = parser.parse("test", new CompressedXContent(mapping)); String builtMapping = docMapper.mappingSource().string(); // reparse it DocumentMapper builtDocMapper = parser.parse("test", new CompressedXContent(builtMapping)); - byte[] json = jsonBuilder().startObject() + byte[] json = BytesReference.toBytes(jsonBuilder().startObject() .field("foo", "bar") .field("foobar", "foobar") - .endObject().bytes().toBytes(); + .endObject().bytes()); Document doc = builtDocMapper.parse("test", "test", "1", new BytesArray(json)).rootDoc(); IndexableField[] fields = doc.getFields("_all"); if (enabled) { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java index fc8e2ba1872..4bf1d0c68f7 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/binary/BinaryMappingTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper.binary; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -79,7 +80,7 @@ public class BinaryMappingTests extends ESSingleNodeTestCase 
{ try (StreamOutput compressed = CompressorFactory.COMPRESSOR.streamOutput(out)) { new BytesArray(binaryValue1).writeTo(compressed); } - final byte[] binaryValue2 = out.bytes().toBytes(); + final byte[] binaryValue2 = BytesReference.toBytes(out.bytes()); assertTrue(CompressorFactory.isCompressed(new BytesArray(binaryValue2))); for (byte[] value : Arrays.asList(binaryValue1, binaryValue2)) { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/ScaledFloatFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/ScaledFloatFieldMapperTests.java new file mode 100644 index 00000000000..aa14fc3495c --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/ScaledFloatFieldMapperTests.java @@ -0,0 +1,339 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.mapper.core; + +import org.apache.lucene.index.DocValuesType; +import org.apache.lucene.index.IndexableField; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.DocumentMapperParser; +import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.junit.Before; + +import java.io.IOException; + +import static org.hamcrest.Matchers.containsString; + +public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase { + + IndexService indexService; + DocumentMapperParser parser; + + @Before + public void before() { + indexService = createIndex("test"); + parser = indexService.mapperService().documentMapperParser(); + } + + public void testDefaults() throws Exception { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "scaled_float") + .field("scaling_factor", 10.0).endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", 123) + .endObject() + .bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(2, fields.length); + IndexableField pointField = fields[0]; + assertEquals(1, pointField.fieldType().pointDimensionCount()); + assertFalse(pointField.fieldType().stored()); + assertEquals(1230, pointField.numericValue().longValue()); + IndexableField dvField = fields[1]; + 
assertEquals(DocValuesType.SORTED_NUMERIC, dvField.fieldType().docValuesType()); + assertEquals(1230, dvField.numericValue().longValue()); + assertFalse(dvField.fieldType().stored()); + } + + public void testMissingScalingFactor() throws IOException { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "scaled_float").endObject().endObject() + .endObject().endObject().string(); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> parser.parse("type", new CompressedXContent(mapping))); + assertEquals("Field [field] misses required parameter [scaling_factor]", e.getMessage()); + } + + public void testIllegalScalingFactor() throws IOException { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "scaled_float") + .field("scaling_factor", -1).endObject().endObject() + .endObject().endObject().string(); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> parser.parse("type", new CompressedXContent(mapping))); + assertEquals("[scaling_factor] must be a positive number, got [-1.0]", e.getMessage()); + } + + public void testNotIndexed() throws Exception { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "scaled_float") + .field("index", false).field("scaling_factor", 10.0).endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", 123) + .endObject() + .bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(1, fields.length); + IndexableField dvField = fields[0]; + assertEquals(DocValuesType.SORTED_NUMERIC, dvField.fieldType().docValuesType()); + assertEquals(1230, dvField.numericValue().longValue()); + } + + public void testNoDocValues() throws Exception { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "scaled_float") + .field("doc_values", false).field("scaling_factor", 10.0).endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", 123) + .endObject() + .bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(1, fields.length); + IndexableField pointField = fields[0]; + assertEquals(1, pointField.fieldType().pointDimensionCount()); + assertEquals(1230, pointField.numericValue().longValue()); + } + + public void testStore() throws Exception { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "scaled_float") + .field("store", true).field("scaling_factor", 10.0).endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, 
mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", 123) + .endObject() + .bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(3, fields.length); + IndexableField pointField = fields[0]; + assertEquals(1, pointField.fieldType().pointDimensionCount()); + assertEquals(1230, pointField.numericValue().doubleValue(), 0d); + IndexableField dvField = fields[1]; + assertEquals(DocValuesType.SORTED_NUMERIC, dvField.fieldType().docValuesType()); + IndexableField storedField = fields[2]; + assertTrue(storedField.fieldType().stored()); + assertEquals(1230, storedField.numericValue().longValue()); + } + + public void testCoerce() throws Exception { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "scaled_float") + .field("scaling_factor", 10.0).endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", "123") + .endObject() + .bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(2, fields.length); + IndexableField pointField = fields[0]; + assertEquals(1, pointField.fieldType().pointDimensionCount()); + assertEquals(1230, pointField.numericValue().longValue()); + IndexableField dvField = fields[1]; + assertEquals(DocValuesType.SORTED_NUMERIC, dvField.fieldType().docValuesType()); + + mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "scaled_float") + .field("scaling_factor", 10.0).field("coerce", false).endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper2.mappingSource().toString()); + + ThrowingRunnable runnable = () -> mapper2.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", "123") + .endObject() + .bytes()); + MapperParsingException e = expectThrows(MapperParsingException.class, runnable); + assertThat(e.getCause().getMessage(), containsString("passed as String")); + } + + public void testIgnoreMalformed() throws Exception { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "scaled_float") + .field("scaling_factor", 10.0).endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper.mappingSource().toString()); + + ThrowingRunnable runnable = () -> mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", "a") + .endObject() + .bytes()); + MapperParsingException e = expectThrows(MapperParsingException.class, runnable); + assertThat(e.getCause().getMessage(), containsString("For input string: \"a\"")); + + mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "scaled_float") + .field("scaling_factor", 10.0).field("ignore_malformed", true).endObject().endObject() + 
.endObject().endObject().string(); + + DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping)); + + ParsedDocument doc = mapper2.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", "a") + .endObject() + .bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(0, fields.length); + } + + public void testIncludeInAll() throws Exception { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", "scaled_float") + .field("scaling_factor", 10.0).endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", 123) + .endObject() + .bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("_all"); + assertEquals(1, fields.length); + assertEquals("123", fields[0].stringValue()); + + mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field") + .field("type", "scaled_float").field("scaling_factor", 10.0) + .field("include_in_all", false).endObject().endObject() + .endObject().endObject().string(); + + mapper = parser.parse("type", new CompressedXContent(mapping)); + + assertEquals(mapping, mapper.mappingSource().toString()); + + doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .field("field", 123) + .endObject() + .bytes()); + + fields = doc.rootDoc().getFields("_all"); + assertEquals(0, fields.length); + } + + public void testNullValue() throws IOException { + String mapping = XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("field") + .field("type", "scaled_float") + .field("scaling_factor", 10.0) + .endObject() + .endObject() + .endObject().endObject().string(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); + assertEquals(mapping, mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .nullField("field") + .endObject() + .bytes()); + assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field")); + + mapping = XContentFactory.jsonBuilder().startObject() + .startObject("type") + .startObject("properties") + .startObject("field") + .field("type", "scaled_float") + .field("scaling_factor", 10.0) + .field("null_value", 2.5) + .endObject() + .endObject() + .endObject().endObject().string(); + + mapper = parser.parse("type", new CompressedXContent(mapping)); + assertEquals(mapping, mapper.mappingSource().toString()); + + doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .nullField("field") + .endObject() + .bytes()); + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(2, fields.length); + IndexableField pointField = fields[0]; + assertEquals(1, pointField.fieldType().pointDimensionCount()); + assertFalse(pointField.fieldType().stored()); + assertEquals(25, pointField.numericValue().longValue()); + IndexableField dvField = fields[1]; + assertEquals(DocValuesType.SORTED_NUMERIC, dvField.fieldType().docValuesType()); + assertFalse(dvField.fieldType().stored()); + } +}
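The new ScaledFloatFieldMapperTests above all revolve around one encoding: index time multiplies the incoming value by scaling_factor and rounds to a long (123 with a factor of 10.0 is stored as 1230, null_value 2.5 as 25), and search/fetch time divides again. A standalone sketch of that round trip, not the mapper's actual code path; the class and method names are illustrative:

// Sketch of the scaled_float encode/decode round trip the tests assert on.
public final class ScaledFloatSketch {

    // What ends up in points/doc values: Math.round(value * scalingFactor),
    // the same expression the field type tests below use to compute expected values.
    static long encode(double value, double scalingFactor) {
        return Math.round(value * scalingFactor);
    }

    // What comes back at search time; the round-trip error is bounded by 0.5 / scalingFactor.
    static double decode(long scaledValue, double scalingFactor) {
        return scaledValue / scalingFactor;
    }

    public static void main(String[] args) {
        double scalingFactor = 10.0;
        long stored = encode(123, scalingFactor);    // 1230, as asserted in testDefaults
        double back = decode(stored, scalingFactor); // 123.0
        System.out.println(stored + " -> " + back);
    }
}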
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/ScaledFloatFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/ScaledFloatFieldTypeTests.java new file mode 100644 index 00000000000..2b05c4cc589 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/ScaledFloatFieldTypeTests.java @@ -0,0 +1,205 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.mapper.core; + +import org.apache.lucene.document.Document; +import org.apache.lucene.document.DoublePoint; +import org.apache.lucene.document.LongPoint; +import org.apache.lucene.document.SortedNumericDocValuesField; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; +import org.apache.lucene.store.Directory; +import org.apache.lucene.util.IOUtils; +import org.elasticsearch.Version; +import org.elasticsearch.action.fieldstats.FieldStats; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.fielddata.AtomicNumericFieldData; +import org.elasticsearch.index.fielddata.IndexNumericFieldData; +import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; +import org.elasticsearch.index.mapper.FieldTypeTestCase; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.junit.Before; + +import java.io.IOException; +import java.util.Arrays; + +public class ScaledFloatFieldTypeTests extends FieldTypeTestCase { + + @Override + protected MappedFieldType createDefaultFieldType() { + ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType(); + ft.setScalingFactor(100); + return ft; + } + + @Before + public void setupProperties() { + addModifier(new Modifier("scaling_factor", false) { + @Override + public void modify(MappedFieldType ft) { + ScaledFloatFieldMapper.ScaledFloatFieldType tft = (ScaledFloatFieldMapper.ScaledFloatFieldType)ft; + tft.setScalingFactor(10); + } + @Override + public void normalizeOther(MappedFieldType other) { + super.normalizeOther(other); + ((ScaledFloatFieldMapper.ScaledFloatFieldType) other).setScalingFactor(100); + } + }); + } + + public void testTermQuery() { + ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType(); + ft.setName("scaled_float"); + ft.setScalingFactor(0.1 + randomDouble() * 100); + double value = (randomDouble() * 2 - 1) * 10000; + long scaledValue = Math.round(value * ft.getScalingFactor()); + assertEquals(LongPoint.newExactQuery("scaled_float", scaledValue), ft.termQuery(value,
null)); + } + + public void testTermsQuery() { + ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType(); + ft.setName("scaled_float"); + ft.setScalingFactor(0.1 + randomDouble() * 100); + double value1 = (randomDouble() * 2 - 1) * 10000; + long scaledValue1 = Math.round(value1 * ft.getScalingFactor()); + double value2 = (randomDouble() * 2 - 1) * 10000; + long scaledValue2 = Math.round(value2 * ft.getScalingFactor()); + assertEquals( + LongPoint.newSetQuery("scaled_float", scaledValue1, scaledValue2), + ft.termsQuery(Arrays.asList(value1, value2), null)); + } + + public void testRangeQuery() throws IOException { + // make sure the accuracy loss of scaled floats only occurs at index time + // this test checks that searching scaled floats yields the same results as + // searching doubles that are rounded to the closest half float + ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType(); + ft.setName("scaled_float"); + ft.setScalingFactor(0.1 + randomDouble() * 100); + Directory dir = newDirectory(); + IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null)); + final int numDocs = 1000; + for (int i = 0; i < numDocs; ++i) { + Document doc = new Document(); + double value = (randomDouble() * 2 - 1) * 10000; + long scaledValue = Math.round(value * ft.getScalingFactor()); + double rounded = scaledValue / ft.getScalingFactor(); + doc.add(new LongPoint("scaled_float", scaledValue)); + doc.add(new DoublePoint("double", rounded)); + w.addDocument(doc); + } + final DirectoryReader reader = DirectoryReader.open(w); + w.close(); + IndexSearcher searcher = newSearcher(reader); + final int numQueries = 1000; + for (int i = 0; i < numQueries; ++i) { + double l = (randomDouble() * 2 - 1) * 10000; + double u = (randomDouble() * 2 - 1) * 10000; + boolean includeLower = randomBoolean(); + boolean includeUpper = randomBoolean(); + Query doubleQ = NumberFieldMapper.NumberType.DOUBLE.rangeQuery("double", l, u, includeLower, includeUpper); + Query scaledFloatQ = ft.rangeQuery(l, u, includeLower, includeUpper); + assertEquals(searcher.count(doubleQ), searcher.count(scaledFloatQ)); + } + IOUtils.close(reader, dir); + } + + public void testValueForSearch() { + ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType(); + ft.setName("scaled_float"); + ft.setScalingFactor(0.1 + randomDouble() * 100); + assertNull(ft.valueForSearch(null)); + assertEquals(10/ft.getScalingFactor(), ft.valueForSearch(10L)); + } + + public void testStats() throws IOException { + ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType(); + ft.setName("scaled_float"); + ft.setScalingFactor(0.1 + randomDouble() * 100); + Directory dir = newDirectory(); + IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null)); + try (DirectoryReader reader = DirectoryReader.open(w)) { + assertNull(ft.stats(reader)); + } + Document doc = new Document(); + LongPoint point = new LongPoint("scaled_float", -1); + doc.add(point); + w.addDocument(doc); + point.setLongValue(10); + w.addDocument(doc); + try (DirectoryReader reader = DirectoryReader.open(w)) { + FieldStats stats = ft.stats(reader); + assertEquals(-1/ft.getScalingFactor(), stats.getMinValue()); + assertEquals(10/ft.getScalingFactor(), stats.getMaxValue()); + assertEquals(2, stats.getMaxDoc()); + } + w.deleteAll(); + try (DirectoryReader reader = DirectoryReader.open(w)) { + assertNull(ft.stats(reader)); + } + 
IOUtils.close(w, dir); + } + + public void testFieldData() throws IOException { + ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType(); + ft.setScalingFactor(0.1 + randomDouble() * 100); + Directory dir = newDirectory(); + IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null)); + Document doc = new Document(); + doc.add(new SortedNumericDocValuesField("scaled_float1", 10)); + doc.add(new SortedNumericDocValuesField("scaled_float2", 5)); + doc.add(new SortedNumericDocValuesField("scaled_float2", 12)); + w.addDocument(doc); + try (DirectoryReader reader = DirectoryReader.open(w)) { + IndexMetaData indexMetadata = new IndexMetaData.Builder("index").settings( + Settings.builder() + .put("index.version.created", Version.CURRENT) + .put("index.number_of_shards", 1) + .put("index.number_of_replicas", 0).build()).build(); + IndexSettings indexSettings = new IndexSettings(indexMetadata, Settings.EMPTY); + + // single-valued + ft.setName("scaled_float1"); + IndexNumericFieldData fielddata = (IndexNumericFieldData) ft.fielddataBuilder().build(indexSettings, ft, null, null, null); + AtomicNumericFieldData leafFieldData = fielddata.load(reader.leaves().get(0)); + SortedNumericDoubleValues values = leafFieldData.getDoubleValues(); + values.setDocument(0); + assertEquals(1, values.count()); + assertEquals(10/ft.getScalingFactor(), values.valueAt(0), 10e-5); + + // multi-valued + ft.setName("scaled_float2"); + fielddata = (IndexNumericFieldData) ft.fielddataBuilder().build(indexSettings, ft, null, null, null); + leafFieldData = fielddata.load(reader.leaves().get(0)); + values = leafFieldData.getDoubleValues(); + values.setDocument(0); + assertEquals(2, values.count()); + assertEquals(5/ft.getScalingFactor(), values.valueAt(0), 10e-5); + assertEquals(12/ft.getScalingFactor(), values.valueAt(1), 10e-5); + } + IOUtils.close(w, dir); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/TextFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/core/TextFieldMapperTests.java index 224d512cb53..6ef040233cc 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/TextFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/TextFieldMapperTests.java @@ -44,6 +44,7 @@ import org.elasticsearch.test.ESSingleNodeTestCase; import org.junit.Before; import java.io.IOException; +import java.util.Arrays; import java.util.HashMap; import java.util.Map; @@ -474,4 +475,32 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase { Exception e = expectThrows(MapperParsingException.class, () -> parser.parse("type", new CompressedXContent(mapping))); assertEquals("[analyzer] must not have a [null] value", e.getMessage()); } + + public void testNotIndexedFieldPositionIncrement() throws IOException { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field") + .field("type", "text") + .field("index", false) + .field("position_increment_gap", 10) + .endObject().endObject().endObject().endObject().string(); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> parser.parse("type", new CompressedXContent(mapping))); + assertEquals("Cannot set position_increment_gap on field [field] without positions enabled", e.getMessage()); + } + + public void testAnalyzedFieldPositionIncrementWithoutPositions() throws IOException { + for (String indexOptions : Arrays.asList("docs", "freqs")) { + String 
mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field") + .field("type", "text") + .field("index_options", indexOptions) + .field("position_increment_gap", 10) + .endObject().endObject().endObject().endObject().string(); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> parser.parse("type", new CompressedXContent(mapping))); + assertEquals("Cannot set position_increment_gap on field [field] without positions enabled", e.getMessage()); + } + } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperIntegrationIT.java b/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperIntegrationIT.java index 31e4e2d0923..da0c3d081af 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/core/TokenCountFieldMapperIntegrationIT.java @@ -166,9 +166,9 @@ public class TokenCountFieldMapperIntegrationIT extends ESIntegTestCase { private SearchRequestBuilder prepareSearch() { SearchRequestBuilder request = client().prepareSearch("test").setTypes("test"); - request.addField("foo.token_count"); + request.addStoredField("foo.token_count"); if (loadCountedFields) { - request.addField("foo"); + request.addStoredField("foo"); } return request; } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalValuesMapperIntegrationIT.java b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalValuesMapperIntegrationIT.java index 350cbc43f9a..99dcaebbc6f 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalValuesMapperIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalValuesMapperIntegrationIT.java @@ -46,7 +46,6 @@ public class ExternalValuesMapperIntegrationIT extends ESIntegTestCase { .startObject("field").field("type", FakeStringFieldMapper.CONTENT_TYPE).endObject() .endObject() .endObject().endObject()).execute().get(); - ensureYellow("test-idx"); index("test-idx", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -96,7 +95,6 @@ public class ExternalValuesMapperIntegrationIT extends ESIntegTestCase { .startObject("field").field("type", ExternalMapperPlugin.EXTERNAL).endObject() .endObject() .endObject().endObject()).execute().get(); - ensureYellow("test-idx"); index("test-idx", "type", "1", XContentFactory.jsonBuilder() .startObject() @@ -150,7 +148,6 @@ public class ExternalValuesMapperIntegrationIT extends ESIntegTestCase { .endObject() .endObject() .endObject().endObject().endObject()).execute().get(); - ensureYellow("test-idx"); index("test-idx", "doc", "1", "f", "This is my text"); refresh(); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java index 202afd7a4b1..14dd370fbfd 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java @@ -816,7 +816,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .field("lon", -74.0059731).endObject().endObject()).setRefreshPolicy(IMMEDIATE).get(); // match all search with geohash field - SearchResponse searchResponse = 
client().prepareSearch().addField("location.geohash").setQuery(matchAllQuery()).execute().actionGet(); + SearchResponse searchResponse = client().prepareSearch().addStoredField("location.geohash").setQuery(matchAllQuery()).execute().actionGet(); Map<String, SearchHitField> m = searchResponse.getHits().getAt(0).getFields(); // ensure single geohash was indexed @@ -841,7 +841,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .field("lon", -74.0059731).endObject().endObject()).setRefreshPolicy(IMMEDIATE).get(); // match all search with geohash field (includes prefixes) - SearchResponse searchResponse = client().prepareSearch().addField("location.geohash").setQuery(matchAllQuery()).execute().actionGet(); + SearchResponse searchResponse = client().prepareSearch().addStoredField("location.geohash").setQuery(matchAllQuery()).execute().actionGet(); Map<String, SearchHitField> m = searchResponse.getHits().getAt(0).getFields(); List<Object> hashes = m.get("location.geohash").values(); @@ -872,11 +872,11 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { } // query by geohash subfield - SearchResponse searchResponse = client().prepareSearch().addField("location.geohash").setQuery(matchAllQuery()).execute().actionGet(); + SearchResponse searchResponse = client().prepareSearch().addStoredField("location.geohash").setQuery(matchAllQuery()).execute().actionGet(); assertEquals(numDocs, searchResponse.getHits().totalHits()); // query by latlon subfield - searchResponse = client().prepareSearch().addField("location.latlon").setQuery(matchAllQuery()).execute().actionGet(); + searchResponse = client().prepareSearch().addStoredField("location.latlon").setQuery(matchAllQuery()).execute().actionGet(); assertEquals(numDocs, searchResponse.getHits().totalHits()); } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java index 627f268545a..0133d3e5943 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/merge/TestMergeMapperTests.java @@ -159,7 +159,7 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase { final AtomicBoolean stopped = new AtomicBoolean(false); final CyclicBarrier barrier = new CyclicBarrier(2); final AtomicReference<String> lastIntroducedFieldName = new AtomicReference<>(); - final AtomicReference<Throwable> error = new AtomicReference<>(); + final AtomicReference<Exception> error = new AtomicReference<>(); final Thread updater = new Thread() { @Override public void run() { @@ -173,8 +173,8 @@ public class TestMergeMapperTests extends ESSingleNodeTestCase { lastIntroducedFieldName.set(fieldName); mapperService.merge("test", new CompressedXContent(update.toString()), MapperService.MergeReason.MAPPING_UPDATE, false); } - } catch (Throwable t) { - error.set(t); + } catch (Exception e) { + error.set(e); } finally { stopped.set(true); }
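The TestMergeMapperTests hunk above shows the idiom this whole change converges on: a background test thread now catches Exception rather than Throwable into an AtomicReference, and the test thread rethrows whatever was captured. A self-contained sketch of that idiom; the class name and the sleep placeholder are illustrative, not from this diff:

import java.util.concurrent.atomic.AtomicReference;

// Capture a failure from a worker thread and surface it in the calling (test) thread.
public class BackgroundFailureSketch {
    public static void main(String[] args) throws Exception {
        AtomicReference<Exception> error = new AtomicReference<>();
        Thread worker = new Thread(() -> {
            try {
                Thread.sleep(10); // stand-in for the work under test
            } catch (Exception e) {
                error.compareAndSet(null, e); // keep only the first failure
            }
        });
        worker.start();
        worker.join();
        if (error.get() != null) {
            throw error.get(); // fail the calling thread with the captured exception
        }
    }
}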
org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.MapperService.MergeReason; +import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; +import org.elasticsearch.index.similarity.SimilarityService; +import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.IndexSettingsModule; + +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; + +import static java.util.Collections.emptyList; +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; public class ParentMappingTests extends ESSingleNodeTestCase { @@ -42,18 +64,67 @@ public class ParentMappingTests extends ESSingleNodeTestCase { } } - public void testParentSet() throws Exception { + public void testJoinFieldSet() throws Exception { + String parentMapping = XContentFactory.jsonBuilder().startObject().startObject("parent_type") + .endObject().endObject().string(); + String childMapping = XContentFactory.jsonBuilder().startObject().startObject("child_type") + .startObject("_parent").field("type", "parent_type").endObject() + .endObject().endObject().string(); + IndexService indexService = createIndex("test"); + indexService.mapperService().merge("parent_type", new CompressedXContent(parentMapping), MergeReason.MAPPING_UPDATE, false); + indexService.mapperService().merge("child_type", new CompressedXContent(childMapping), MergeReason.MAPPING_UPDATE, false); + + // Indexing parent doc: + DocumentMapper parentDocMapper = indexService.mapperService().documentMapper("parent_type"); + ParsedDocument doc = parentDocMapper.parse(SourceToParse.source("test", "parent_type", "1122", new BytesArray("{}"))); + assertEquals(1, getNumberOfFieldWithParentPrefix(doc.rootDoc())); + assertEquals("1122", doc.rootDoc().getBinaryValue("_parent#parent_type").utf8ToString()); + + // Indexing child doc: + DocumentMapper childDocMapper = indexService.mapperService().documentMapper("child_type"); + doc = childDocMapper.parse(SourceToParse.source("test", "child_type", "1", new BytesArray("{}")).parent("1122")); + + assertEquals(1, getNumberOfFieldWithParentPrefix(doc.rootDoc())); + assertEquals("1122", doc.rootDoc().getBinaryValue("_parent#parent_type").utf8ToString()); + } + + public void testJoinFieldNotSet() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_parent").field("type", "p_type").endObject() .endObject().endObject().string(); DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() .startObject() .field("x_field", "x_value") .endObject() - 
.bytes()).parent("1122")); + .bytes())); + assertEquals(0, getNumberOfFieldWithParentPrefix(doc.rootDoc())); + } - assertEquals("1122", doc.rootDoc().getBinaryValue("_parent#p_type").utf8ToString()); + public void testNoParentNullFieldCreatedIfNoParentSpecified() throws Exception { + Index index = new Index("_index", "testUUID"); + IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(index, Settings.EMPTY); + AnalysisService analysisService = new AnalysisService(indexSettings, Collections.emptyMap(), Collections.emptyMap(), + Collections.emptyMap(), Collections.emptyMap()); + SimilarityService similarityService = new SimilarityService(indexSettings, Collections.emptyMap()); + MapperService mapperService = new MapperService(indexSettings, analysisService, similarityService, + new IndicesModule(new NamedWriteableRegistry(), emptyList()).getMapperRegistry(), () -> null); + XContentBuilder mappingSource = jsonBuilder().startObject().startObject("some_type") + .startObject("properties") + .endObject() + .endObject().endObject(); + mapperService.merge("some_type", new CompressedXContent(mappingSource.string()), MergeReason.MAPPING_UPDATE, false); + Set allFields = new HashSet<>(mapperService.simpleMatchToIndexNames("*")); + assertTrue(allFields.contains("_parent")); + assertFalse(allFields.contains("_parent#null")); + } + + private static int getNumberOfFieldWithParentPrefix(ParseContext.Document doc) { + int numFieldWithParentPrefix = 0; + for (IndexableField field : doc) { + if (field.name().startsWith("_parent")) { + numFieldWithParentPrefix++; + } + } + return numFieldWithParentPrefix; } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java index a08b97b008d..0dc1c4c5118 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/string/SimpleStringMappingTests.java @@ -677,4 +677,34 @@ public class SimpleStringMappingTests extends ESSingleNodeTestCase { () -> mapper.mappers().getMapper("field").fieldType().fielddataBuilder()); assertThat(e.getMessage(), containsString("Fielddata is disabled")); } + + public void testNonAnalyzedFieldPositionIncrement() throws IOException { + for (String index : Arrays.asList("no", "not_analyzed")) { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field") + .field("type", "string") + .field("index", index) + .field("position_increment_gap", 10) + .endObject().endObject().endObject().endObject().string(); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> parser.parse("type", new CompressedXContent(mapping))); + assertEquals("Cannot set position_increment_gap on field [field] without positions enabled", e.getMessage()); + } + } + + public void testAnalyzedFieldPositionIncrementWithoutPositions() throws IOException { + for (String indexOptions : Arrays.asList("docs", "freqs")) { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field") + .field("type", "string") + .field("index_options", indexOptions) + .field("position_increment_gap", 10) + .endObject().endObject().endObject().endObject().string(); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> parser.parse("type", new 
CompressedXContent(mapping))); + assertEquals("Cannot set position_increment_gap on field [field] without positions enabled", e.getMessage()); + } + } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java index 78da5abb746..8f38e2be576 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/timestamp/TimestampMappingTests.java @@ -297,7 +297,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { out.close(); BytesReference bytes = out.bytes(); - MappingMetaData metaData = MappingMetaData.PROTO.readFrom(StreamInput.wrap(bytes)); + MappingMetaData metaData = MappingMetaData.PROTO.readFrom(bytes.streamInput()); assertThat(metaData, is(expected)); } @@ -314,7 +314,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { out.close(); BytesReference bytes = out.bytes(); - MappingMetaData metaData = MappingMetaData.PROTO.readFrom(StreamInput.wrap(bytes)); + MappingMetaData metaData = MappingMetaData.PROTO.readFrom(bytes.streamInput()); assertThat(metaData, is(expected)); } @@ -331,7 +331,7 @@ public class TimestampMappingTests extends ESSingleNodeTestCase { out.close(); BytesReference bytes = out.bytes(); - MappingMetaData metaData = MappingMetaData.PROTO.readFrom(StreamInput.wrap(bytes)); + MappingMetaData metaData = MappingMetaData.PROTO.readFrom(bytes.streamInput()); assertThat(metaData, is(expected)); } diff --git a/core/src/test/java/org/elasticsearch/index/query/CombineFunctionTests.java b/core/src/test/java/org/elasticsearch/index/query/CombineFunctionTests.java index 695330c21e2..667efbc8bac 100644 --- a/core/src/test/java/org/elasticsearch/index/query/CombineFunctionTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/CombineFunctionTests.java @@ -40,41 +40,41 @@ public class CombineFunctionTests extends ESTestCase { public void testWriteTo() throws Exception { try (BytesStreamOutput out = new BytesStreamOutput()) { CombineFunction.MULTIPLY.writeTo(out); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(in.readVInt(), equalTo(0)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { CombineFunction.REPLACE.writeTo(out); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(in.readVInt(), equalTo(1)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { CombineFunction.SUM.writeTo(out); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(in.readVInt(), equalTo(2)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { CombineFunction.AVG.writeTo(out); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(in.readVInt(), equalTo(3)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { CombineFunction.MIN.writeTo(out); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(in.readVInt(), equalTo(4)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { CombineFunction.MAX.writeTo(out); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(in.readVInt(), equalTo(5)); } } @@ -83,37 
+83,37 @@ public class CombineFunctionTests extends ESTestCase { public void testReadFrom() throws Exception { try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(0); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(CombineFunction.readFromStream(in), equalTo(CombineFunction.MULTIPLY)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(1); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(CombineFunction.readFromStream(in), equalTo(CombineFunction.REPLACE)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(2); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(CombineFunction.readFromStream(in), equalTo(CombineFunction.SUM)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(3); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(CombineFunction.readFromStream(in), equalTo(CombineFunction.AVG)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(4); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(CombineFunction.readFromStream(in), equalTo(CombineFunction.MIN)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(5); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(CombineFunction.readFromStream(in), equalTo(CombineFunction.MAX)); } } diff --git a/core/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java index 48b0ed7d4ce..910de2cf5d9 100644 --- a/core/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java @@ -349,7 +349,7 @@ public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase innerHitBuilders = new HashMap<>(); - InnerHitBuilder.extractInnerHits(queryBuilder, innerHitBuilders); - for (InnerHitBuilder builder : innerHitBuilders.values()) { - builder.build(searchContext, searchContext.innerHits()); - } - assertNotNull(searchContext.innerHits()); - assertEquals(1, searchContext.innerHits().getInnerHits().size()); - assertTrue(searchContext.innerHits().getInnerHits().containsKey(queryBuilder.innerHit().getName())); - InnerHitsContext.BaseInnerHits innerHits = - searchContext.innerHits().getInnerHits().get(queryBuilder.innerHit().getName()); - assertEquals(innerHits.size(), queryBuilder.innerHit().getSize()); - assertEquals(innerHits.sort().sort.getSort().length, 1); - assertEquals(innerHits.sort().sort.getSort()[0].getField(), STRING_FIELD_NAME_2); - } else { - assertThat(searchContext.innerHits().getInnerHits().size(), equalTo(0)); + Map innerHitBuilders = new HashMap<>(); + InnerHitBuilder.extractInnerHits(queryBuilder, innerHitBuilders); + for (InnerHitBuilder builder : innerHitBuilders.values()) { + builder.build(searchContext, searchContext.innerHits()); } + assertNotNull(searchContext.innerHits()); + assertEquals(1, searchContext.innerHits().getInnerHits().size()); + assertTrue(searchContext.innerHits().getInnerHits().containsKey(queryBuilder.innerHit().getName())); + 
InnerHitsContext.BaseInnerHits innerHits = + searchContext.innerHits().getInnerHits().get(queryBuilder.innerHit().getName()); + assertEquals(innerHits.size(), queryBuilder.innerHit().getSize()); + assertEquals(innerHits.sort().sort.getSort().length, 1); + assertEquals(innerHits.sort().sort.getSort()[0].getField(), STRING_FIELD_NAME_2); } } @@ -305,7 +309,7 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase innerHitBuilders = new HashMap<>(); - InnerHitBuilder.extractInnerHits(queryBuilder, innerHitBuilders); - for (InnerHitBuilder builder : innerHitBuilders.values()) { - builder.build(searchContext, searchContext.innerHits()); - } - assertNotNull(searchContext.innerHits()); - assertEquals(1, searchContext.innerHits().getInnerHits().size()); - assertTrue(searchContext.innerHits().getInnerHits().containsKey(queryBuilder.innerHit().getName())); - InnerHitsContext.BaseInnerHits innerHits = searchContext.innerHits() - .getInnerHits().get(queryBuilder.innerHit().getName()); - assertEquals(innerHits.size(), queryBuilder.innerHit().getSize()); - assertEquals(innerHits.sort().sort.getSort().length, 1); - assertEquals(innerHits.sort().sort.getSort()[0].getField(), STRING_FIELD_NAME_2); - } else { - assertThat(searchContext.innerHits().getInnerHits().size(), equalTo(0)); + Map innerHitBuilders = new HashMap<>(); + InnerHitBuilder.extractInnerHits(queryBuilder, innerHitBuilders); + for (InnerHitBuilder builder : innerHitBuilders.values()) { + builder.build(searchContext, searchContext.innerHits()); } + assertNotNull(searchContext.innerHits()); + assertEquals(1, searchContext.innerHits().getInnerHits().size()); + assertTrue(searchContext.innerHits().getInnerHits().containsKey(queryBuilder.innerHit().getName())); + InnerHitsContext.BaseInnerHits innerHits = searchContext.innerHits() + .getInnerHits().get(queryBuilder.innerHit().getName()); + assertEquals(innerHits.size(), queryBuilder.innerHit().getSize()); + assertEquals(innerHits.sort().sort.getSort().length, 1); + assertEquals(innerHits.sort().sort.getSort()[0].getField(), STRING_FIELD_NAME_2); } } @@ -196,7 +199,7 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase randomAsciiOfLengthBetween(1, 16))); - innerHits.setFieldDataFields(randomListStuff(16, () -> randomAsciiOfLengthBetween(1, 16))); + innerHits.setStoredFieldNames(randomListStuff(16, () -> randomAsciiOfLengthBetween(1, 16))); + innerHits.setDocValueFields(randomListStuff(16, () -> randomAsciiOfLengthBetween(1, 16))); // Random script fields deduped on their field name. 
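// A later random field with the same name overwrites the earlier entry, so the
// builder only ever sees unique script field names. A minimal sketch of the
// dedup pattern, assuming fieldName() is the accessor on
// SearchSourceBuilder.ScriptField and setScriptFields accepts a Set:
//
//     Map<String, SearchSourceBuilder.ScriptField> byName = new HashMap<>();
//     for (SearchSourceBuilder.ScriptField field : randomFields) {
//         byName.put(field.fieldName(), field); // last writer wins per name
//     }
//     innerHits.setScriptFields(new HashSet<>(byName.values()));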
Map scriptFields = new HashMap<>(); for (SearchSourceBuilder.ScriptField field: randomListStuff(16, InnerHitBuilderTests::randomScript)) { @@ -294,11 +296,11 @@ public class InnerHitBuilderTests extends ESTestCase { break; case 6: if (randomBoolean()) { - instance.setFieldDataFields(randomValueOtherThan(instance.getFieldDataFields(), () -> { + instance.setDocValueFields(randomValueOtherThan(instance.getDocValueFields(), () -> { return randomListStuff(16, () -> randomAsciiOfLengthBetween(1, 16)); })); } else { - instance.addFieldDataField(randomAsciiOfLengthBetween(1, 16)); + instance.addDocValueField(randomAsciiOfLengthBetween(1, 16)); } break; case 7: @@ -341,12 +343,12 @@ public class InnerHitBuilderTests extends ESTestCase { HighlightBuilderTests::randomHighlighterBuilder)); break; case 11: - if (instance.getFieldNames() == null || randomBoolean()) { - instance.setFieldNames(randomValueOtherThan(instance.getFieldNames(), () -> { + if (instance.getStoredFieldNames() == null || randomBoolean()) { + instance.setStoredFieldNames(randomValueOtherThan(instance.getStoredFieldNames(), () -> { return randomListStuff(16, () -> randomAsciiOfLengthBetween(1, 16)); })); } else { - instance.getFieldNames().add(randomAsciiOfLengthBetween(1, 16)); + instance.getStoredFieldNames().add(randomAsciiOfLengthBetween(1, 16)); } break; default: @@ -381,7 +383,7 @@ public class InnerHitBuilderTests extends ESTestCase { private static InnerHitBuilder serializedCopy(InnerHitBuilder original) throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { original.writeTo(output); - try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { + try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) { return new InnerHitBuilder(in); } } diff --git a/core/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java index 91a42d70809..3c5bfed86dd 100644 --- a/core/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java @@ -33,7 +33,6 @@ import org.elasticsearch.action.termvectors.TermVectorsRequest; import org.elasticsearch.action.termvectors.TermVectorsResponse; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.lucene.search.MoreLikeThisQuery; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -208,7 +207,7 @@ public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase { + boolean requiresRewrite = false; + @Override protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { mapperService.merge("nested_doc", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("nested_doc", @@ -68,7 +70,12 @@ public class NestedQueryBuilderTests extends AbstractQueryTestCase innerHitBuilders = new HashMap<>(); - InnerHitBuilder.extractInnerHits(queryBuilder, innerHitBuilders); - for (InnerHitBuilder builder : innerHitBuilders.values()) { - builder.build(searchContext, searchContext.innerHits()); - } - assertNotNull(searchContext.innerHits()); - assertEquals(1, searchContext.innerHits().getInnerHits().size()); - 
assertTrue(searchContext.innerHits().getInnerHits().containsKey(queryBuilder.innerHit().getName())); - InnerHitsContext.BaseInnerHits innerHits = searchContext.innerHits().getInnerHits().get(queryBuilder.innerHit().getName()); - assertEquals(innerHits.size(), queryBuilder.innerHit().getSize()); - assertEquals(innerHits.sort().sort.getSort().length, 1); - assertEquals(innerHits.sort().sort.getSort()[0].getField(), INT_FIELD_NAME); - } else { - assertThat(searchContext.innerHits().getInnerHits().size(), equalTo(0)); + Map innerHitBuilders = new HashMap<>(); + InnerHitBuilder.extractInnerHits(queryBuilder, innerHitBuilders); + for (InnerHitBuilder builder : innerHitBuilders.values()) { + builder.build(searchContext, searchContext.innerHits()); } + assertNotNull(searchContext.innerHits()); + assertEquals(1, searchContext.innerHits().getInnerHits().size()); + assertTrue(searchContext.innerHits().getInnerHits().containsKey(queryBuilder.innerHit().getName())); + InnerHitsContext.BaseInnerHits innerHits = searchContext.innerHits().getInnerHits().get(queryBuilder.innerHit().getName()); + assertEquals(innerHits.size(), queryBuilder.innerHit().getSize()); + assertEquals(innerHits.sort().sort.getSort().length, 1); + assertEquals(innerHits.sort().sort.getSort()[0].getField(), INT_FIELD_NAME); } } @@ -189,7 +196,7 @@ public class NestedQueryBuilderTests extends AbstractQueryTestCase()); - } - public void testTerm() { termQuery("name", "kimchy"); } diff --git a/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java index 8a3552cf8f2..4996794775c 100644 --- a/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java @@ -520,7 +520,7 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase new ScriptQueryBuilder((Script) null)); } - public void testFromJson() throws IOException { + public void testFromJsonVerbose() throws IOException { String json = "{\n" + " \"script\" : {\n" + @@ -67,4 +67,18 @@ public class ScriptQueryBuilderTests extends AbstractQueryTestCase> getScoreFunctions() { + return singletonList(new ScoreFunctionSpec<>(RandomScoreFunctionBuilderWithFixedSeed.NAME, + RandomScoreFunctionBuilderWithFixedSeed::new, RandomScoreFunctionBuilderWithFixedSeed::fromXContent)); } - } } diff --git a/core/src/test/java/org/elasticsearch/index/query/plugin/CustomQueryParserIT.java b/core/src/test/java/org/elasticsearch/index/query/plugin/CustomQueryParserIT.java index ba87b293701..c5935f7b32a 100644 --- a/core/src/test/java/org/elasticsearch/index/query/plugin/CustomQueryParserIT.java +++ b/core/src/test/java/org/elasticsearch/index/query/plugin/CustomQueryParserIT.java @@ -43,6 +43,11 @@ public class CustomQueryParserIT extends ESIntegTestCase { return pluginList(DummyQueryParserPlugin.class); } + @Override + protected Collection> transportClientPlugins() { + return pluginList(DummyQueryParserPlugin.class); + } + @Override @Before public void setUp() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/index/query/plugin/DummyQueryBuilder.java b/core/src/test/java/org/elasticsearch/index/query/plugin/DummyQueryBuilder.java index 334244559c9..08b9ee64742 100644 --- a/core/src/test/java/org/elasticsearch/index/query/plugin/DummyQueryBuilder.java +++ b/core/src/test/java/org/elasticsearch/index/query/plugin/DummyQueryBuilder.java @@ -20,7 +20,6 @@ 
package org.elasticsearch.index.query.plugin; import org.apache.lucene.search.Query; -import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -34,8 +33,7 @@ import java.io.IOException; import java.util.Optional; public class DummyQueryBuilder extends AbstractQueryBuilder { - private static final String NAME = "dummy"; - static final ParseField QUERY_NAME_FIELD = new ParseField(NAME); + public static final String NAME = "dummy"; public DummyQueryBuilder() { } diff --git a/core/src/test/java/org/elasticsearch/index/query/plugin/DummyQueryParserPlugin.java b/core/src/test/java/org/elasticsearch/index/query/plugin/DummyQueryParserPlugin.java index e61fe24d297..86c03c72738 100644 --- a/core/src/test/java/org/elasticsearch/index/query/plugin/DummyQueryParserPlugin.java +++ b/core/src/test/java/org/elasticsearch/index/query/plugin/DummyQueryParserPlugin.java @@ -24,14 +24,18 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.Weight; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.search.SearchModule; +import org.elasticsearch.plugins.SearchPlugin; import java.io.IOException; +import java.util.List; -public class DummyQueryParserPlugin extends Plugin { +import static java.util.Collections.singletonList; - public void onModule(SearchModule module) { - module.registerQuery(DummyQueryBuilder::new, DummyQueryBuilder::fromXContent, DummyQueryBuilder.QUERY_NAME_FIELD); +public class DummyQueryParserPlugin extends Plugin implements SearchPlugin { + + @Override + public List> getQueries() { + return singletonList(new QuerySpec<>(DummyQueryBuilder.NAME, DummyQueryBuilder::new, DummyQueryBuilder::fromXContent)); } public static class DummyQuery extends Query { diff --git a/core/src/test/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java b/core/src/test/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java index edf5a0d28f2..62a09584d84 100644 --- a/core/src/test/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java @@ -29,6 +29,8 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.flush.FlushRequest; +import org.elasticsearch.action.admin.indices.recovery.RecoveryRequest; +import org.elasticsearch.action.admin.indices.stats.IndexShardStats; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.index.TransportIndexAction; @@ -46,7 +48,7 @@ import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.env.NodeEnvironment; @@ -61,6 +63,7 @@ import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.internal.UidFieldMapper; import 
org.elasticsearch.index.shard.IndexEventListener; import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.IndexShardState; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardPath; import org.elasticsearch.index.similarity.SimilarityService; @@ -106,11 +109,11 @@ import static org.hamcrest.Matchers.equalTo; public abstract class ESIndexLevelReplicationTestCase extends ESTestCase { - private ThreadPool threadPool; - final private Index index = new Index("test", "uuid"); - final private ShardId shardId = new ShardId(index, 0); - final private Map indexMapping = Collections.singletonMap("type", "{ \"type\": {} }"); - protected final static RecoveryTargetService.RecoveryListener recoveryListener = new RecoveryTargetService.RecoveryListener() { + protected ThreadPool threadPool; + private final Index index = new Index("test", "uuid"); + private final ShardId shardId = new ShardId(index, 0); + private final Map indexMapping = Collections.singletonMap("type", "{ \"type\": {} }"); + protected static final RecoveryTargetService.RecoveryListener recoveryListener = new RecoveryTargetService.RecoveryListener() { @Override public void onRecoveryDone(RecoveryState state) { @@ -136,7 +139,7 @@ public abstract class ESIndexLevelReplicationTestCase extends ESTestCase { new BiFunction() { @Override public RecoveryTarget apply(IndexShard indexShard, DiscoveryNode node) { - return new RecoveryTarget(indexShard, node, recoveryListener) { + return new RecoveryTarget(indexShard, node, recoveryListener, version -> {}) { @Override public void renameAllTempFiles() throws IOException { super.renameAllTempFiles(); @@ -160,14 +163,15 @@ public abstract class ESIndexLevelReplicationTestCase extends ESTestCase { } } - - @Before - public void setup() { + @Override + public void setUp() throws Exception { + super.setUp(); threadPool = new TestThreadPool(getClass().getName()); } - @After - public void destroy() { + @Override + public void tearDown() throws Exception { + super.tearDown(); ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS); } @@ -197,8 +201,8 @@ public abstract class ESIndexLevelReplicationTestCase extends ESTestCase { return new ReplicationGroup(metaData, homePath); } - private DiscoveryNode getDiscoveryNode(String id) { - return new DiscoveryNode(id, id, DummyTransportAddress.INSTANCE, Collections.emptyMap(), + protected DiscoveryNode getDiscoveryNode(String id) { + return new DiscoveryNode(id, id, new LocalTransportAddress(id), Collections.emptyMap(), Collections.singleton(DiscoveryNode.Role.DATA), Version.CURRENT); } @@ -227,7 +231,7 @@ public abstract class ESIndexLevelReplicationTestCase extends ESTestCase { } - class ReplicationGroup implements AutoCloseable, Iterable { + protected class ReplicationGroup implements AutoCloseable, Iterable { private final IndexShard primary; private final List replicas; private final IndexMetaData indexMetaData; @@ -270,7 +274,8 @@ public abstract class ESIndexLevelReplicationTestCase extends ESTestCase { primary.recoverFromStore(); primary.updateRoutingEntry(ShardRoutingHelper.moveToStarted(primary.routingEntry())); for (IndexShard replicaShard : replicas) { - recoverReplica(replicaShard, (replica, sourceNode) -> new RecoveryTarget(replica, sourceNode, recoveryListener)); + recoverReplica(replicaShard, + (replica, sourceNode) -> new RecoveryTarget(replica, sourceNode, recoveryListener, version -> {})); } } @@ -279,26 +284,36 @@ public abstract class ESIndexLevelReplicationTestCase extends 
ESTestCase { replicas.add(replica); return replica; } - public void recoverReplica(IndexShard replica, BiFunction targetSupplier) throws IOException { - final DiscoveryNode pNode; - synchronized (this) { - pNode = getDiscoveryNode(primary.routingEntry().currentNodeId()); - } + recoverReplica(replica, targetSupplier, true); + } + + public void recoverReplica(IndexShard replica, BiFunction targetSupplier, + boolean markAsRecovering) + throws IOException { + final DiscoveryNode pNode = getPrimaryNode(); final DiscoveryNode rNode = getDiscoveryNode(replica.routingEntry().currentNodeId()); - replica.markAsRecovering("remote", new RecoveryState(replica.shardId(), false, RecoveryState.Type.REPLICA, pNode, rNode)); + if (markAsRecovering) { + replica.markAsRecovering("remote", new RecoveryState(replica.shardId(), false, RecoveryState.Type.REPLICA, pNode, rNode)); + } else { + assertEquals(replica.state(), IndexShardState.RECOVERING); + } replica.prepareForIndexRecovery(); RecoveryTarget recoveryTarget = targetSupplier.apply(replica, pNode); StartRecoveryRequest request = new StartRecoveryRequest(replica.shardId(), pNode, rNode, replica.store().getMetadataOrEmpty(), RecoveryState.Type.REPLICA, 0); - RecoverySourceHandler recovery = new RecoverySourceHandler(primary, recoveryTarget, request, (int) ByteSizeUnit.MB.toKB(1), - logger); + RecoverySourceHandler recovery = new RecoverySourceHandler(primary, recoveryTarget, request, () -> 0L, e -> () -> {}, + (int) ByteSizeUnit.MB.toKB(1), logger); recovery.recoverToTarget(); recoveryTarget.markAsDone(); replica.updateRoutingEntry(ShardRoutingHelper.moveToStarted(replica.routingEntry())); } + public synchronized DiscoveryNode getPrimaryNode() { + return getDiscoveryNode(primary.routingEntry().currentNodeId()); + } + public Future asyncRecoverReplica(IndexShard replica, BiFunction targetSupplier) throws IOException { FutureTask task = new FutureTask<>(() -> { @@ -375,6 +390,10 @@ public abstract class ESIndexLevelReplicationTestCase extends ESTestCase { public Iterator iterator() { return Iterators.concat(replicas.iterator(), Collections.singleton(primary).iterator()); } + + public IndexShard getPrimary() { + return primary; + } } class IndexingOp extends ReplicationOperation { @@ -408,7 +427,7 @@ public abstract class ESIndexLevelReplicationTestCase extends ESTestCase { } @Override - public void failShard(String message, Throwable throwable) { + public void failShard(String message, Exception exception) { throw new UnsupportedOperationException(); } @@ -436,14 +455,14 @@ public abstract class ESIndexLevelReplicationTestCase extends ESTestCase { .filter(s -> replicaRouting.isSameAllocation(s.routingEntry())).findFirst().get(); TransportIndexAction.executeIndexRequestOnReplica(request, replica); listener.onResponse(TransportResponse.Empty.INSTANCE); - } catch (Throwable t) { + } catch (Exception t) { listener.onFailure(t); } } @Override - public void failShard(ShardRouting replica, ShardRouting primary, String message, Throwable throwable, Runnable onSuccess, - Consumer onPrimaryDemoted, Consumer onIgnoredFailure) { + public void failShard(ShardRouting replica, ShardRouting primary, String message, Exception exception, Runnable onSuccess, + Consumer onPrimaryDemoted, Consumer onIgnoredFailure) { throw new UnsupportedOperationException(); } } diff --git a/core/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java b/core/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java index 
815884edf51..3e26e3018b2 100644 --- a/core/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java +++ b/core/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java @@ -66,7 +66,7 @@ public class RecoveryDuringReplicationTests extends ESIndexLevelReplicationTestC BlockingTarget(RecoveryState.Stage stageToBlock, CountDownLatch recoveryBlocked, CountDownLatch releaseRecovery, IndexShard shard, DiscoveryNode sourceNode, RecoveryTargetService.RecoveryListener listener, ESLogger logger) { - super(shard, sourceNode, listener); + super(shard, sourceNode, listener, version -> {}); this.recoveryBlocked = recoveryBlocked; this.releaseRecovery = releaseRecovery; this.stageToBlock = stageToBlock; diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardOperationsLockTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardOperationsLockTests.java new file mode 100644 index 00000000000..c9bb9e19866 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardOperationsLockTests.java @@ -0,0 +1,219 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.index.shard; + +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.inject.internal.Nullable; +import org.elasticsearch.common.lease.Releasable; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; + +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; +import java.util.concurrent.atomic.AtomicBoolean; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; + +public class IndexShardOperationsLockTests extends ESTestCase { + + private static ThreadPool threadPool; + + private IndexShardOperationsLock block; + + @BeforeClass + public static void setupThreadPool() { + threadPool = new TestThreadPool("IndexShardOperationsLockTests"); + } + + @AfterClass + public static void shutdownThreadPool() { + ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS); + threadPool = null; + } + + @Before + public void createIndexShardOperationsLock() { + block = new IndexShardOperationsLock(new ShardId("blubb", "id", 0), logger, threadPool); + } + + @After + public void checkNoInflightOperations() { + assertThat(block.semaphore.availablePermits(), equalTo(Integer.MAX_VALUE)); + assertThat(block.getActiveOperationsCount(), equalTo(0)); + } + + public void testAllOperationsInvoked() throws InterruptedException, TimeoutException, ExecutionException { + int numThreads = 10; + + List<PlainActionFuture<Releasable>> futures = new ArrayList<>(); + List<Thread> operationThreads = new ArrayList<>(); + CountDownLatch latch = new CountDownLatch(numThreads / 2); + for (int i = 0; i < numThreads; i++) { + PlainActionFuture<Releasable> future = new PlainActionFuture<Releasable>() { + @Override + public void onResponse(Releasable releasable) { + releasable.close(); + super.onResponse(releasable); + } + }; + Thread thread = new Thread() { + public void run() { + latch.countDown(); + block.acquire(future, ThreadPool.Names.GENERIC, true); + } + }; + futures.add(future); + operationThreads.add(thread); + } + + CountDownLatch blockFinished = new CountDownLatch(1); + threadPool.generic().execute(() -> { + try { + latch.await(); + blockAndWait().close(); + blockFinished.countDown(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }); + + for (Thread thread : operationThreads) { + thread.start(); + } + + for (PlainActionFuture<Releasable> future : futures) { + assertNotNull(future.get(1, TimeUnit.MINUTES)); + } + + for (Thread thread : operationThreads) { + thread.join(); + } + + blockFinished.await(); + } + + + public void testOperationsInvokedImmediatelyIfNoBlock() throws ExecutionException, InterruptedException { + PlainActionFuture<Releasable> future = new PlainActionFuture<>(); + block.acquire(future, ThreadPool.Names.GENERIC, true); + assertTrue(future.isDone()); + future.get().close(); + } + + public void testOperationsIfClosed() throws ExecutionException, InterruptedException { + PlainActionFuture<Releasable> future = new PlainActionFuture<>(); + block.close(); + block.acquire(future, ThreadPool.Names.GENERIC, true); + ExecutionException exception = expectThrows(ExecutionException.class, future::get); + assertThat(exception.getCause(), instanceOf(IndexShardClosedException.class)); + } + + public void testBlockIfClosed() throws ExecutionException, InterruptedException { + block.close(); + expectThrows(IndexShardClosedException.class, () -> block.blockOperations(randomInt(10), TimeUnit.MINUTES, + () -> { throw new IllegalArgumentException("fake error"); })); + } + + public void testOperationsDelayedIfBlock() throws ExecutionException, InterruptedException, TimeoutException { + PlainActionFuture<Releasable> future = new PlainActionFuture<>(); + try (Releasable releasable = blockAndWait()) { + block.acquire(future, ThreadPool.Names.GENERIC, true); + assertFalse(future.isDone()); + } + future.get(1, TimeUnit.MINUTES).close(); + } + + protected Releasable blockAndWait() throws InterruptedException { + CountDownLatch blockAcquired = new CountDownLatch(1); + CountDownLatch releaseBlock = new CountDownLatch(1); + CountDownLatch blockReleased = new CountDownLatch(1); + boolean throwsException = randomBoolean(); + IndexShardClosedException exception = new IndexShardClosedException(new ShardId("blubb", "id", 0)); + threadPool.generic().execute(() -> { + try { + block.blockOperations(1, TimeUnit.MINUTES, () -> { + try { + blockAcquired.countDown(); + releaseBlock.await(); + if (throwsException) { + throw exception; + } + } catch (InterruptedException e) { + throw new RuntimeException(); + } + }); + } catch (Exception e) { + if (e != exception) { + throw new RuntimeException(e); + } + } finally { + blockReleased.countDown(); + } + }); + blockAcquired.await(); + return () -> { + releaseBlock.countDown(); + try { + blockReleased.await(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }; + } + + public void testActiveOperationsCount() throws ExecutionException, InterruptedException { + PlainActionFuture<Releasable> future1 = new PlainActionFuture<>(); + block.acquire(future1, ThreadPool.Names.GENERIC, true); + assertTrue(future1.isDone()); + assertThat(block.getActiveOperationsCount(), equalTo(1)); + + PlainActionFuture<Releasable> future2 = new PlainActionFuture<>(); + block.acquire(future2, ThreadPool.Names.GENERIC, true); + assertTrue(future2.isDone()); + assertThat(block.getActiveOperationsCount(), equalTo(2)); + + future1.get().close(); + assertThat(block.getActiveOperationsCount(), equalTo(1)); + future1.get().close(); // check idempotence + assertThat(block.getActiveOperationsCount(), equalTo(1)); + future2.get().close(); + assertThat(block.getActiveOperationsCount(), equalTo(0)); + + try (Releasable releasable = blockAndWait()) { + assertThat(block.getActiveOperationsCount(), equalTo(0)); + } + + PlainActionFuture<Releasable> future3 = new PlainActionFuture<>(); + block.acquire(future3, ThreadPool.Names.GENERIC, true); + assertTrue(future3.isDone()); + assertThat(block.getActiveOperationsCount(), equalTo(1)); + future3.get().close(); + assertThat(block.getActiveOperationsCount(), equalTo(0)); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 40a23ee66cf..95a705f8e27 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -44,11 +44,14 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.TransportIndexAction; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.ClusterInfoService; import 
org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.InternalClusterInfoService; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.AllocationId; import org.elasticsearch.cluster.routing.RestoreSource; @@ -62,13 +65,14 @@ import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.ImmutableOpenMap; +import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -90,21 +94,24 @@ import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.internal.UidFieldMapper; -import org.elasticsearch.index.snapshots.IndexShardRepository; import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.repositories.Repository; import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.snapshots.SnapshotId; +import org.elasticsearch.snapshots.SnapshotInfo; +import org.elasticsearch.snapshots.SnapshotShardFailure; import org.elasticsearch.test.DummyShardLock; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.FieldMaskingReader; import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; +import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; import java.nio.file.Files; @@ -121,6 +128,7 @@ import java.util.concurrent.BrokenBarrierException; import java.util.concurrent.CountDownLatch; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; @@ -245,7 +253,8 @@ public class IndexShardTests extends ESSingleNodeTestCase { ShardPath shardPath = ShardPath.loadShardPath(logger, env, shard.shardId(), test.getIndexSettings()); assertNotNull(shardPath); // but index can't be opened for a failed shard - assertThat("store index should be corrupted", Store.canOpenIndex(logger, shardPath.resolveIndex(), shard.shardId()), equalTo(false)); + assertThat("store index should be corrupted", Store.canOpenIndex(logger, shardPath.resolveIndex(), 
shard.shardId(), env::shardLock), + equalTo(false)); } ShardStateMetaData getShardStateMetadata(IndexShard shard) { @@ -295,13 +304,13 @@ public class IndexShardTests extends ESSingleNodeTestCase { client().admin().indices().prepareDelete("test").get(); assertThat(indexShard.getActiveOperationsCount(), equalTo(0)); try { - indexShard.acquirePrimaryOperationLock(); + indexShard.acquirePrimaryOperationLock(null, ThreadPool.Names.INDEX); fail("we should not be able to increment anymore"); } catch (IndexShardClosedException e) { // expected } try { - indexShard.acquireReplicaOperationLock(indexShard.getPrimaryTerm()); + indexShard.acquireReplicaOperationLock(indexShard.getPrimaryTerm(), null, ThreadPool.Names.INDEX); fail("we should not be able to increment anymore"); } catch (IndexShardClosedException e) { // expected @@ -339,21 +348,33 @@ public class IndexShardTests extends ESSingleNodeTestCase { assertEquals(0, indexShard.getActiveOperationsCount()); if (newPrimaryShardRouting.isRelocationTarget() == false) { try { - indexShard.acquireReplicaOperationLock(primaryTerm); + indexShard.acquireReplicaOperationLock(primaryTerm, null, ThreadPool.Names.INDEX); fail("shard shouldn't accept operations as replica"); } catch (IllegalStateException ignored) { } } - Releasable operation1 = indexShard.acquirePrimaryOperationLock(); + Releasable operation1 = acquirePrimaryOperationLockBlockingly(indexShard); assertEquals(1, indexShard.getActiveOperationsCount()); - Releasable operation2 = indexShard.acquirePrimaryOperationLock(); + Releasable operation2 = acquirePrimaryOperationLockBlockingly(indexShard); assertEquals(2, indexShard.getActiveOperationsCount()); Releasables.close(operation1, operation2); assertEquals(0, indexShard.getActiveOperationsCount()); } + private Releasable acquirePrimaryOperationLockBlockingly(IndexShard indexShard) throws ExecutionException, InterruptedException { + PlainActionFuture<Releasable> fut = new PlainActionFuture<>(); + indexShard.acquirePrimaryOperationLock(fut, ThreadPool.Names.INDEX); + return fut.get(); + } + + private Releasable acquireReplicaOperationLockBlockingly(IndexShard indexShard, long opPrimaryTerm) throws ExecutionException, InterruptedException { + PlainActionFuture<Releasable> fut = new PlainActionFuture<>(); + indexShard.acquireReplicaOperationLock(opPrimaryTerm, fut, ThreadPool.Names.INDEX); + return fut.get(); + } + public void testOperationLocksOnReplicaShards() throws InterruptedException, ExecutionException, IOException { assertAcked(client().admin().indices().prepareCreate("test").setSettings(Settings.builder().put("index.number_of_shards", 1).put("index.number_of_replicas", 0)).get()); ensureGreen("test"); @@ -399,20 +420,20 @@ public class IndexShardTests extends ESSingleNodeTestCase { assertEquals(0, indexShard.getActiveOperationsCount()); if (newShardRouting.primary() == false) { try { - indexShard.acquirePrimaryOperationLock(); + indexShard.acquirePrimaryOperationLock(null, ThreadPool.Names.INDEX); fail("shard shouldn't accept primary ops"); } catch (IllegalStateException ignored) { } } - Releasable operation1 = indexShard.acquireReplicaOperationLock(primaryTerm); + Releasable operation1 = acquireReplicaOperationLockBlockingly(indexShard, primaryTerm); assertEquals(1, indexShard.getActiveOperationsCount()); - Releasable operation2 = indexShard.acquireReplicaOperationLock(primaryTerm); + Releasable operation2 = acquireReplicaOperationLockBlockingly(indexShard, primaryTerm); assertEquals(2, indexShard.getActiveOperationsCount()); try { - 
indexShard.acquireReplicaOperationLock(primaryTerm - 1); + indexShard.acquireReplicaOperationLock(primaryTerm - 1, null, ThreadPool.Names.INDEX); fail("you can not increment the operation counter with an older primary term"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), containsString("operation term")); @@ -420,7 +441,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { } // but you can increment with a newer one.. - indexShard.acquireReplicaOperationLock(primaryTerm + 1 + randomInt(20)).close(); + acquireReplicaOperationLockBlockingly(indexShard, primaryTerm + 1 + randomInt(20)).close(); Releasables.close(operation1, operation2); assertEquals(0, indexShard.getActiveOperationsCount()); } @@ -448,7 +469,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { public static void write(ShardStateMetaData shardStateMetaData, Path... shardPaths) throws IOException { - ShardStateMetaData.FORMAT.write(shardStateMetaData, shardStateMetaData.legacyVersion, shardPaths); + ShardStateMetaData.FORMAT.write(shardStateMetaData, shardPaths); } public void testDurableFlagHasEffect() { @@ -653,7 +674,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { if (randomBoolean() || true) { // try to serialize it to ensure values survive the serialization BytesStreamOutput out = new BytesStreamOutput(); stats.writeTo(out); - StreamInput in = StreamInput.wrap(out.bytes()); + StreamInput in = out.bytes().streamInput(); stats = ShardStats.readShardStats(in); } XContentBuilder builder = XContentFactory.jsonBuilder(); @@ -710,7 +731,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { } @Override - public void postIndex(Engine.Index index, Throwable ex) { + public void postIndex(Engine.Index index, Exception ex) { postIndexException.incrementAndGet(); } @@ -726,7 +747,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { } @Override - public void postDelete(Engine.Delete delete, Throwable ex) { + public void postDelete(Engine.Delete delete, Exception ex) { postDeleteException.incrementAndGet(); } @@ -882,13 +903,18 @@ public class IndexShardTests extends ESSingleNodeTestCase { IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService test = indicesService.indexService(resolveIndex("test")); final IndexShard shard = test.getShardOrNull(0); + assertBusy(() -> assertThat(shard.state(), equalTo(IndexShardState.STARTED))); CountDownLatch latch = new CountDownLatch(1); Thread recoveryThread = new Thread(() -> { latch.countDown(); - shard.relocated("simulated recovery"); + try { + shard.relocated("simulated recovery"); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } }); - try (Releasable ignored = shard.acquirePrimaryOperationLock()) { + try (Releasable ignored = acquirePrimaryOperationLockBlockingly(shard)) { // start finalization of recovery recoveryThread.start(); latch.await(); @@ -898,12 +924,50 @@ public class IndexShardTests extends ESSingleNodeTestCase { // recovery can be now finalized recoveryThread.join(); assertThat(shard.state(), equalTo(IndexShardState.RELOCATED)); - try (Releasable ignored = shard.acquirePrimaryOperationLock()) { + try (Releasable ignored = acquirePrimaryOperationLockBlockingly(shard)) { // lock can again be acquired assertThat(shard.state(), equalTo(IndexShardState.RELOCATED)); } } + public void testDelayedOperationsBeforeAndAfterRelocated() throws Exception { + assertAcked(client().admin().indices().prepareCreate("test").setSettings( + 
Settings.builder().put("index.number_of_shards", 1).put("index.number_of_replicas", 0) + ).get()); + ensureGreen(); + IndicesService indicesService = getInstanceFromNode(IndicesService.class); + IndexService test = indicesService.indexService(resolveIndex("test")); + final IndexShard shard = test.getShardOrNull(0); + assertBusy(() -> assertThat(shard.state(), equalTo(IndexShardState.STARTED))); + Thread recoveryThread = new Thread(() -> { + try { + shard.relocated("simulated recovery"); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }); + + recoveryThread.start(); + List> onLockAcquiredActions = new ArrayList<>(); + for (int i = 0; i < 10; i++) { + PlainActionFuture onLockAcquired = new PlainActionFuture() { + @Override + public void onResponse(Releasable releasable) { + releasable.close(); + super.onResponse(releasable); + } + }; + shard.acquirePrimaryOperationLock(onLockAcquired, ThreadPool.Names.INDEX); + onLockAcquiredActions.add(onLockAcquired); + } + + for (PlainActionFuture onLockAcquired : onLockAcquiredActions) { + assertNotNull(onLockAcquired.get(30, TimeUnit.SECONDS)); + } + + recoveryThread.join(); + } + public void testStressRelocated() throws Exception { assertAcked(client().admin().indices().prepareCreate("test").setSettings( Settings.builder().put("index.number_of_shards", 1).put("index.number_of_replicas", 0) @@ -920,10 +984,10 @@ public class IndexShardTests extends ESSingleNodeTestCase { indexThreads[i] = new Thread() { @Override public void run() { - try (Releasable operationLock = shard.acquirePrimaryOperationLock()) { + try (Releasable operationLock = acquirePrimaryOperationLockBlockingly(shard)) { allPrimaryOperationLocksAcquired.countDown(); barrier.await(); - } catch (InterruptedException | BrokenBarrierException e) { + } catch (InterruptedException | BrokenBarrierException | ExecutionException e) { throw new RuntimeException(e); } } @@ -932,7 +996,11 @@ public class IndexShardTests extends ESSingleNodeTestCase { } AtomicBoolean relocated = new AtomicBoolean(); final Thread recoveryThread = new Thread(() -> { - shard.relocated("simulated recovery"); + try { + shard.relocated("simulated recovery"); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } relocated.set(true); }); // ensure we wait for all primary operation locks to be acquired @@ -974,7 +1042,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { routing = ShardRoutingHelper.reinit(routing); IndexShard newShard = test.createShard(routing); newShard.updateRoutingEntry(routing); - DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + DiscoveryNode localNode = new DiscoveryNode("foo", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); newShard.markAsRecovering("store", new RecoveryState(newShard.shardId(), routing.primary(), RecoveryState.Type.STORE, localNode, localNode)); assertTrue(newShard.recoverFromStore()); assertEquals(translogOps, newShard.recoveryState().getTranslog().recoveredOperations()); @@ -1001,7 +1069,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { routing = ShardRoutingHelper.reinit(routing, UnassignedInfo.Reason.INDEX_CREATED); IndexShard newShard = test.createShard(routing); newShard.updateRoutingEntry(routing); - DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + DiscoveryNode localNode = new DiscoveryNode("foo", 
LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); newShard.markAsRecovering("store", new RecoveryState(newShard.shardId(), routing.primary(), RecoveryState.Type.STORE, localNode, localNode)); assertTrue(newShard.recoverFromStore()); @@ -1018,7 +1086,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { createIndex("test"); ensureGreen(); IndicesService indicesService = getInstanceFromNode(IndicesService.class); - DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + DiscoveryNode localNode = new DiscoveryNode("foo", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); IndexService test = indicesService.indexService(resolveIndex("test")); final IndexShard shard = test.getShardOrNull(0); @@ -1114,15 +1182,11 @@ public class IndexShardTests extends ESSingleNodeTestCase { Store targetStore = test_target_shard.store(); test_target_shard.updateRoutingEntry(routing); - DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + DiscoveryNode localNode = new DiscoveryNode("foo", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); test_target_shard.markAsRecovering("store", new RecoveryState(routing.shardId(), routing.primary(), RecoveryState.Type.SNAPSHOT, routing.restoreSource(), localNode)); - assertTrue(test_target_shard.restoreFromRepository(new IndexShardRepository() { + assertTrue(test_target_shard.restoreFromRepository(new RestoreOnlyRepository() { @Override - public void snapshot(SnapshotId snapshotId, ShardId shardId, IndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus) { - } - - @Override - public void restore(SnapshotId snapshotId, Version version, ShardId shardId, ShardId snapshotShardId, RecoveryState recoveryState) { + public void restoreShard(IndexShard shard, SnapshotId snapshotId, Version version, ShardId snapshotShardId, RecoveryState recoveryState) { try { cleanLuceneIndex(targetStore.directory()); for (String file : sourceStore.directory().listAll()) { @@ -1135,15 +1199,6 @@ public class IndexShardTests extends ESSingleNodeTestCase { throw new RuntimeException(ex); } } - - @Override - public IndexShardSnapshotStatus snapshotStatus(SnapshotId snapshotId, Version version, ShardId shardId) { - return null; - } - - @Override - public void verify(String verificationToken) { - } })); test_target_shard.updateRoutingEntry(routing.moveToStarted()); @@ -1321,7 +1376,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { IndexSearcherWrapper wrapper = new IndexSearcherWrapper() {}; shard.close("simon says", false); AtomicReference<IndexShard> shardRef = new AtomicReference<>(); - List<Throwable> failures = new ArrayList<>(); + List<Exception> failures = new ArrayList<>(); IndexingOperationListener listener = new IndexingOperationListener() { @Override @@ -1331,9 +1386,9 @@ public class IndexShardTests extends ESSingleNodeTestCase { // this is all IMC needs to do - check current memory and refresh assertTrue(shardRef.get().getIndexBufferRAMBytesUsed() > 0); shardRef.get().refresh("test"); - } catch (Throwable t) {
- failures.add(t); - throw t; + } catch (Exception e) { + failures.add(e); + throw e; } } }; @@ -1398,7 +1453,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { } public static final IndexShard recoverShard(IndexShard newShard) throws IOException { - DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + DiscoveryNode localNode = new DiscoveryNode("foo", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); newShard.markAsRecovering("store", new RecoveryState(newShard.shardId(), newShard.routingEntry().primary(), RecoveryState.Type.STORE, localNode, localNode)); assertTrue(newShard.recoverFromStore()); newShard.updateRoutingEntry(newShard.routingEntry().moveToStarted()); @@ -1410,8 +1465,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { IndexShard newShard = new IndexShard(initializingShardRouting, indexService.getIndexSettings(), shard.shardPath(), shard.store(), indexService.cache(), indexService.mapperService(), indexService.similarityService(), indexService.fieldData(), shard.getEngineFactory(), indexService.getIndexEventListener(), wrapper, - indexService.getThreadPool(), indexService.getBigArrays(), null, Collections.emptyList(), Arrays.asList(listeners) - ); + indexService.getThreadPool(), indexService.getBigArrays(), null, Collections.emptyList(), Arrays.asList(listeners)); return newShard; } @@ -1439,10 +1493,10 @@ public class IndexShardTests extends ESSingleNodeTestCase { ShardRouting routing = getInitializingShardRouting(shard.routingEntry()); test.removeShard(0, "b/c britta says so"); IndexShard newShard = test.createShard(routing); - DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + DiscoveryNode localNode = new DiscoveryNode("foo", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); newShard.markAsRecovering("for testing", new RecoveryState(newShard.shardId(), routing.primary(), RecoveryState.Type.REPLICA, localNode, localNode)); List<Translog.Operation> operations = new ArrayList<>(); - operations.add(new Translog.Index("testtype", "1", jsonBuilder().startObject().field("foo", "bar").endObject().bytes().toBytes())); + operations.add(new Translog.Index("testtype", "1", BytesReference.toBytes(jsonBuilder().startObject().field("foo", "bar").endObject().bytes()))); newShard.prepareForIndexRecovery(); newShard.recoveryState().getTranslog().totalOperations(operations.size()); newShard.skipTranslogRecovery(); @@ -1467,7 +1521,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { test.removeShard(0, "b/c britta says so"); IndexShard newShard = test.createShard(routing); newShard.shardRouting = routing; - DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + DiscoveryNode localNode = new DiscoveryNode("foo", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); newShard.markAsRecovering("for testing", new RecoveryState(newShard.shardId(), routing.primary(), RecoveryState.Type.REPLICA, localNode, localNode)); // Shard is still inactive since we haven't started recovering yet assertFalse(newShard.isActive()); @@ -1495,12 +1549,12 @@ public class IndexShardTests extends ESSingleNodeTestCase { ShardRouting routing = getInitializingShardRouting(shard.routingEntry()); test.removeShard(0, "b/c britta says so"); IndexShard newShard = test.createShard(routing); - DiscoveryNode
localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + DiscoveryNode localNode = new DiscoveryNode("foo", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); newShard.markAsRecovering("for testing", new RecoveryState(newShard.shardId(), routing.primary(), RecoveryState.Type.REPLICA, localNode, localNode)); // Shard is still inactive since we haven't started recovering yet assertFalse(newShard.isActive()); List<Translog.Operation> operations = new ArrayList<>(); - operations.add(new Translog.Index("testtype", "1", jsonBuilder().startObject().field("foo", "bar").endObject().bytes().toBytes())); + operations.add(new Translog.Index("testtype", "1", BytesReference.toBytes(jsonBuilder().startObject().field("foo", "bar").endObject().bytes()))); newShard.prepareForIndexRecovery(); newShard.skipTranslogRecovery(); // Shard is still inactive since we haven't started recovering yet @@ -1530,7 +1584,7 @@ public class IndexShardTests extends ESSingleNodeTestCase { IndexShard shard = test.getShardOrNull(0); ShardRouting routing = ShardRoutingHelper.initWithSameId(shard.routingEntry()); test.removeShard(0, "b/c simon says so"); - DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + DiscoveryNode localNode = new DiscoveryNode("foo", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); { final IndexShard newShard = test.createShard(routing); newShard.updateRoutingEntry(routing); @@ -1588,4 +1642,67 @@ public class IndexShardTests extends ESSingleNodeTestCase { assertEquals(mappings.get("index_1").get("test").get().source().string(), "{\"test\":{\"properties\":{\"foo\":{\"type\":\"text\"}}}}"); } + + /** A dummy repository for testing which just needs restore overridden */ + private abstract static class RestoreOnlyRepository extends AbstractLifecycleComponent implements Repository { + public RestoreOnlyRepository() { + super(Settings.EMPTY); + } + @Override + protected void doStart() {} + @Override + protected void doStop() {} + @Override + protected void doClose() {} + @Override + public RepositoryMetaData getMetadata() { + return null; + } + @Override + public SnapshotInfo getSnapshotInfo(SnapshotId snapshotId) { + return null; + } + @Override + public MetaData getSnapshotMetaData(SnapshotInfo snapshot, List<String> indices) throws IOException { + return null; + } + @Override + public List<SnapshotId> getSnapshots() { + return null; + } + @Override + public void initializeSnapshot(SnapshotId snapshotId, List<String> indices, MetaData metaData) {} + @Override + public SnapshotInfo finalizeSnapshot(SnapshotId snapshotId, List<String> indices, long startTime, String failure, int totalShards, List<SnapshotShardFailure> shardFailures) { + return null; + } + @Override + public void deleteSnapshot(SnapshotId snapshotId) {} + @Override + public long getSnapshotThrottleTimeInNanos() { + return 0; + } + @Override + public long getRestoreThrottleTimeInNanos() { + return 0; + } + @Override + public String startVerification() { + return null; + } + @Override + public void endVerification(String verificationToken) {} + @Override + public boolean isReadOnly() { + return false; + } + @Override + public void snapshotShard(IndexShard shard, SnapshotId snapshotId, IndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus) {} + @Override + public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, Version version, ShardId shardId) { + return null; + } + @Override + public void
verify(String verificationToken, DiscoveryNode localNode) {} + } } diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexingOperationListenerTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexingOperationListenerTests.java index 8d86e64a391..d1cf8b32f58 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexingOperationListenerTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexingOperationListenerTests.java @@ -51,7 +51,7 @@ public class IndexingOperationListenerTests extends ESTestCase{ } @Override - public void postIndex(Engine.Index index, Throwable ex) { + public void postIndex(Engine.Index index, Exception ex) { postIndexException.incrementAndGet(); } @@ -67,7 +67,7 @@ public class IndexingOperationListenerTests extends ESTestCase{ } @Override - public void postDelete(Engine.Delete delete, Throwable ex) { + public void postDelete(Engine.Delete delete, Exception ex) { postDeleteException.incrementAndGet(); } }; @@ -83,7 +83,7 @@ public class IndexingOperationListenerTests extends ESTestCase{ throw new RuntimeException(); } @Override - public void postIndex(Engine.Index index, Throwable ex) { + public void postIndex(Engine.Index index, Exception ex) { throw new RuntimeException(); } @Override @@ -96,7 +96,7 @@ public class IndexingOperationListenerTests extends ESTestCase{ throw new RuntimeException(); } @Override - public void postDelete(Engine.Delete delete, Throwable ex) { + public void postDelete(Engine.Delete delete, Exception ex) { throw new RuntimeException(); } }; diff --git a/core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java b/core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java index 4938f686f60..5b896190d09 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java @@ -56,6 +56,8 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.threadpool.ThreadPool.Cancellable; +import org.elasticsearch.threadpool.ThreadPool.Names; import org.junit.After; import org.junit.Before; @@ -115,7 +117,7 @@ public class RefreshListenersTests extends ESTestCase { BigArrays.NON_RECYCLING_INSTANCE); Engine.EventListener eventListener = new Engine.EventListener() { @Override - public void onFailedEngine(String reason, @Nullable Throwable t) { + public void onFailedEngine(String reason, @Nullable Exception e) { // we don't need to notify anybody in this test } }; @@ -222,7 +224,7 @@ public class RefreshListenersTests extends ESTestCase { maxListeners = between(1, threadCount * 2); // This thread just refreshes every once in a while to cause trouble. - ScheduledFuture<?> refresher = threadPool.scheduleWithFixedDelay(() -> engine.refresh("because test"), timeValueMillis(100)); + Cancellable refresher = threadPool.scheduleWithFixedDelay(() -> engine.refresh("because test"), timeValueMillis(100), Names.SAME); // These threads add and block until the refresh makes the change visible and then do a non-realtime get.
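An aside on the scheduling change just above: scheduleWithFixedDelay now hands back a ThreadPool.Cancellable instead of a ScheduledFuture, so callers cancel the recurring task directly rather than going through FutureUtils. A minimal sketch of the new pattern, built only from the calls visible in this hunk (the task body and interval are placeholders, not part of the diff):

    // schedule a recurring background refresh on the named executor
    Cancellable refresher = threadPool.scheduleWithFixedDelay(
            () -> engine.refresh("because test"),   // placeholder task body
            timeValueMillis(100),                   // placeholder delay
            Names.SAME);
    // ... exercise the listeners under test ...
    refresher.cancel();                             // replaces FutureUtils.cancel(scheduledFuture)
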
Thread[] indexers = new Thread[threadCount]; @@ -251,7 +253,7 @@ public class RefreshListenersTests extends ESTestCase { getResult.docIdAndVersion().context.reader().document(getResult.docIdAndVersion().docId, visitor); assertEquals(Arrays.asList(testFieldValue), visitor.fields().get("test")); } - } catch (Throwable t) { + } catch (Exception t) { throw new RuntimeException("failure on the [" + iteration + "] iteration of thread [" + threadId + "]", t); } } @@ -262,7 +264,7 @@ public class RefreshListenersTests extends ESTestCase { for (Thread indexer: indexers) { indexer.join(); } - FutureUtils.cancel(refresher); + refresher.cancel(); } private Engine.Index index(String id) { @@ -279,7 +281,7 @@ public class RefreshListenersTests extends ESTestCase { document.add(uidField); document.add(versionField); BytesReference source = new BytesArray(new byte[] { 1 }); - ParsedDocument doc = new ParsedDocument(versionField, id, type, null, -1, -1, Arrays.asList(document), source, null); + ParsedDocument doc = new ParsedDocument(versionField, id, type, null, -1, -1, Arrays.asList(document), source, null); Engine.Index index = new Engine.Index(new Term("_uid", uid), doc); engine.index(index); return index; } @@ -290,7 +292,7 @@ * When the listener is called this captures its only argument. */ AtomicReference<Boolean> forcedRefresh = new AtomicReference<>(); - private volatile Throwable error; + private volatile Exception error; @Override public void accept(Boolean forcedRefresh) { @@ -298,7 +300,7 @@ assertNotNull(forcedRefresh); Boolean oldValue = this.forcedRefresh.getAndSet(forcedRefresh); assertNull("Listener called twice", oldValue); - } catch (Throwable e) { + } catch (Exception e) { error = e; } } diff --git a/core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java b/core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java index b2bd7e1f9ff..749b1621e4d 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/ShardPathTests.java @@ -45,7 +45,7 @@ public class ShardPathTests extends ESTestCase { ShardId shardId = new ShardId("foo", "0xDEADBEEF", 0); Path[] paths = env.availableShardPaths(shardId); Path path = randomFrom(paths); - ShardStateMetaData.FORMAT.write(new ShardStateMetaData(2, true, "0xDEADBEEF", AllocationId.newInitializing()), 2, path); + ShardStateMetaData.FORMAT.write(new ShardStateMetaData(2, true, "0xDEADBEEF", AllocationId.newInitializing()), path); ShardPath shardPath = ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.getIndex(), settings)); assertEquals(path, shardPath.getDataPath()); assertEquals("0xDEADBEEF", shardPath.getShardId().getIndex().getUUID()); @@ -65,7 +65,7 @@ public class ShardPathTests extends ESTestCase { Path[] paths = env.availableShardPaths(shardId); assumeTrue("This test tests multi data.path but we only got one", paths.length > 1); int id = randomIntBetween(1, 10); - ShardStateMetaData.FORMAT.write(new ShardStateMetaData(id, true, indexUUID, AllocationId.newInitializing()), id, paths); + ShardStateMetaData.FORMAT.write(new ShardStateMetaData(id, true, indexUUID, AllocationId.newInitializing()), paths); ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.getIndex(), settings)); fail("Expected IllegalStateException"); } catch (IllegalStateException e) { @@ -82,7 +82,7 @@
public class ShardPathTests extends ESTestCase { Path[] paths = env.availableShardPaths(shardId); Path path = randomFrom(paths); int id = randomIntBetween(1, 10); - ShardStateMetaData.FORMAT.write(new ShardStateMetaData(id, true, "0xDEADBEEF", AllocationId.newInitializing()), id, path); + ShardStateMetaData.FORMAT.write(new ShardStateMetaData(id, true, "0xDEADBEEF", AllocationId.newInitializing()), path); ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.getIndex(), settings)); fail("Expected IllegalStateException"); } catch (IllegalStateException e) { @@ -124,7 +124,7 @@ public class ShardPathTests extends ESTestCase { final boolean includeNodeId = randomBoolean(); indexSettings = indexSettingsBuilder.put(IndexMetaData.SETTING_DATA_PATH, "custom").build(); nodeSettings = Settings.builder().put(Environment.PATH_SHARED_DATA_SETTING.getKey(), path.toAbsolutePath().toAbsolutePath()) - .put(NodeEnvironment.ADD_NODE_ID_TO_CUSTOM_PATH.getKey(), includeNodeId).build(); + .put(NodeEnvironment.ADD_NODE_LOCK_ID_TO_CUSTOM_PATH.getKey(), includeNodeId).build(); if (includeNodeId) { customPath = path.resolve("custom").resolve("0"); } else { @@ -139,7 +139,7 @@ public class ShardPathTests extends ESTestCase { ShardId shardId = new ShardId("foo", indexUUID, 0); Path[] paths = env.availableShardPaths(shardId); Path path = randomFrom(paths); - ShardStateMetaData.FORMAT.write(new ShardStateMetaData(2, true, indexUUID, AllocationId.newInitializing()), 2, path); + ShardStateMetaData.FORMAT.write(new ShardStateMetaData(2, true, indexUUID, AllocationId.newInitializing()), path); ShardPath shardPath = ShardPath.loadShardPath(logger, env, shardId, IndexSettingsModule.newIndexSettings(shardId.getIndex(), indexSettings)); boolean found = false; for (Path p : env.nodeDataPaths()) { diff --git a/core/src/test/java/org/elasticsearch/index/snapshots/blobstore/FileInfoTests.java b/core/src/test/java/org/elasticsearch/index/snapshots/blobstore/FileInfoTests.java index 70eacaafedb..abaebb88c5e 100644 --- a/core/src/test/java/org/elasticsearch/index/snapshots/blobstore/FileInfoTests.java +++ b/core/src/test/java/org/elasticsearch/index/snapshots/blobstore/FileInfoTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.index.snapshots.blobstore; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.Version; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -52,7 +53,7 @@ public class FileInfoTests extends ESTestCase { BlobStoreIndexShardSnapshot.FileInfo info = new BlobStoreIndexShardSnapshot.FileInfo("_foobar", meta, size); XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON).prettyPrint(); BlobStoreIndexShardSnapshot.FileInfo.toXContent(info, builder, ToXContent.EMPTY_PARAMS); - byte[] xcontent = shuffleXContent(builder).bytes().toBytes(); + byte[] xcontent = BytesReference.toBytes(shuffleXContent(builder).bytes()); final BlobStoreIndexShardSnapshot.FileInfo parsedInfo; try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(xcontent)) { @@ -111,7 +112,7 @@ public class FileInfoTests extends ESTestCase { builder.field(FileInfo.WRITTEN_BY, Version.LATEST.toString()); builder.field(FileInfo.CHECKSUM, "666"); builder.endObject(); - byte[] xContent = builder.bytes().toBytes(); + byte[] xContent = 
BytesReference.toBytes(builder.bytes()); if (failure == null) { // No failures should read as usual diff --git a/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java b/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java index 6508336d9f8..a8f8a9f802d 100644 --- a/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java +++ b/core/src/test/java/org/elasticsearch/index/store/CorruptedFileIT.java @@ -22,6 +22,7 @@ import com.carrotsearch.hppc.cursors.IntObjectCursor; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import org.apache.lucene.index.CheckIndex; import org.apache.lucene.index.IndexFileNames; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; @@ -192,7 +193,7 @@ public class CorruptedFileIT extends ESIntegTestCase { * run the checkindex. if the corruption is still there we will catch it. */ final CountDownLatch latch = new CountDownLatch(numShards * 3); // primary + 2 replicas - final CopyOnWriteArrayList<Throwable> exception = new CopyOnWriteArrayList<>(); + final CopyOnWriteArrayList<Exception> exception = new CopyOnWriteArrayList<>(); final IndexEventListener listener = new IndexEventListener() { @Override public void afterIndexShardClosed(ShardId sid, @Nullable IndexShard indexShard, Settings indexSettings) { @@ -210,12 +211,12 @@ public class CorruptedFileIT extends ESIntegTestCase { out.flush(); CheckIndex.Status status = checkIndex.checkIndex(); if (!status.clean) { - logger.warn("check index [failure]\n{}", new String(os.bytes().toBytes(), StandardCharsets.UTF_8)); + logger.warn("check index [failure]\n{}", os.bytes().utf8ToString()); throw new IOException("index check failure"); } } - } catch (Throwable t) { - exception.add(t); + } catch (Exception e) { + exception.add(e); } finally { store.decRef(); latch.countDown(); @@ -346,7 +347,7 @@ public class CorruptedFileIT extends ESIntegTestCase { public void sendRequest(DiscoveryNode node, long requestId, String action, TransportRequest request, TransportRequestOptions options) throws IOException, TransportException { if (corrupt.get() && action.equals(RecoveryTargetService.Actions.FILE_CHUNK)) { RecoveryFileChunkRequest req = (RecoveryFileChunkRequest) request; - byte[] array = req.content().array(); + byte[] array = BytesRef.deepCopyOf(req.content().toBytesRef()).bytes; int i = randomIntBetween(0, req.content().length() - 1); array[i] = (byte) ~array[i]; // flip one byte in the content hasCorrupted.countDown(); @@ -419,10 +420,12 @@ public class CorruptedFileIT extends ESIntegTestCase { if (action.equals(RecoveryTargetService.Actions.FILE_CHUNK)) { RecoveryFileChunkRequest req = (RecoveryFileChunkRequest) request; if (truncate && req.length() > 1) { - BytesArray array = new BytesArray(req.content().array(), req.content().arrayOffset(), (int) req.length() - 1); + BytesRef bytesRef = req.content().toBytesRef(); + BytesArray array = new BytesArray(bytesRef.bytes, bytesRef.offset, (int) req.length() - 1); request = new RecoveryFileChunkRequest(req.recoveryId(), req.shardId(), req.metadata(), req.position(), array, req.lastChunk(), req.totalTranslogOps(), req.sourceThrottleTimeInNanos()); } else { - byte[] array = req.content().array(); + assert req.content().toBytesRef().bytes == req.content().toBytesRef().bytes : "no internal reference!!"; + final byte[] array =
req.content().toBytesRef().bytes; int i = randomIntBetween(0, req.content().length() - 1); array[i] = (byte) ~array[i]; // flip one byte in the content } @@ -643,12 +646,12 @@ public class CorruptedFileIT extends ESIntegTestCase { return shardRouting; } - private static final boolean isPerCommitFile(String fileName) { + private static boolean isPerCommitFile(String fileName) { // .liv and segments_N are per commit files and might change after corruption return fileName.startsWith("segments") || fileName.endsWith(".liv"); } - private static final boolean isPerSegmentFile(String fileName) { + private static boolean isPerSegmentFile(String fileName) { return isPerCommitFile(fileName) == false; } diff --git a/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java b/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java index 62ef2e029b9..054b588fc56 100644 --- a/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java +++ b/core/src/test/java/org/elasticsearch/index/store/CorruptedTranslogIT.java @@ -80,7 +80,6 @@ public class CorruptedTranslogIT extends ESIntegTestCase { .put("index.refresh_interval", "-1") .put(MockEngineSupport.DISABLE_FLUSH_ON_CLOSE.getKey(), true) // never flush - always recover from translog )); - ensureYellow(); // Index some documents int numDocs = scaledRandomIntBetween(100, 1000); diff --git a/core/src/test/java/org/elasticsearch/index/store/ExceptionRetryIT.java b/core/src/test/java/org/elasticsearch/index/store/ExceptionRetryIT.java index 163d72f4553..590c5c624f1 100644 --- a/core/src/test/java/org/elasticsearch/index/store/ExceptionRetryIT.java +++ b/core/src/test/java/org/elasticsearch/index/store/ExceptionRetryIT.java @@ -113,7 +113,7 @@ public class ExceptionRetryIT extends ESIntegTestCase { } refresh(); - SearchResponse searchResponse = client().prepareSearch("index").setSize(numDocs * 2).addField("_id").get(); + SearchResponse searchResponse = client().prepareSearch("index").setSize(numDocs * 2).addStoredField("_id").get(); Set<String> uniqueIds = new HashSet<>(); long dupCounter = 0; diff --git a/core/src/test/java/org/elasticsearch/index/store/StoreTests.java b/core/src/test/java/org/elasticsearch/index/store/StoreTests.java index e40f1c7f06f..e1636b713a1 100644 --- a/core/src/test/java/org/elasticsearch/index/store/StoreTests.java +++ b/core/src/test/java/org/elasticsearch/index/store/StoreTests.java @@ -1000,14 +1000,14 @@ public class StoreTests extends ESTestCase { IndexWriterConfig iwc = newIndexWriterConfig(); Path tempDir = createTempDir(); final BaseDirectoryWrapper dir = newFSDirectory(tempDir); - assertFalse(Store.canOpenIndex(logger, tempDir,shardId)); + assertFalse(Store.canOpenIndex(logger, tempDir, shardId, (id, l) -> new DummyShardLock(id))); IndexWriter writer = new IndexWriter(dir, iwc); Document doc = new Document(); doc.add(new StringField("id", "1", random().nextBoolean() ?
Field.Store.YES : Field.Store.NO)); writer.addDocument(doc); writer.commit(); writer.close(); - assertTrue(Store.canOpenIndex(logger, tempDir, shardId)); + assertTrue(Store.canOpenIndex(logger, tempDir, shardId, (id, l) -> new DummyShardLock(id))); DirectoryService directoryService = new DirectoryService(shardId, INDEX_SETTINGS) { @Override @@ -1022,7 +1022,7 @@ public class StoreTests extends ESTestCase { }; Store store = new Store(shardId, INDEX_SETTINGS, directoryService, new DummyShardLock(shardId)); store.markStoreCorrupted(new CorruptIndexException("foo", "bar")); - assertFalse(Store.canOpenIndex(logger, tempDir, shardId)); + assertFalse(Store.canOpenIndex(logger, tempDir, shardId, (id, l) -> new DummyShardLock(id))); store.close(); } diff --git a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index c72a6ecef27..c9b3daa806a 100644 --- a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.translog; import com.carrotsearch.randomizedtesting.generators.RandomPicks; - import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.index.Term; import org.apache.lucene.mockfile.FilterFileChannel; @@ -60,9 +59,7 @@ import java.nio.charset.Charset; import java.nio.file.FileAlreadyExistsException; import java.nio.file.Files; import java.nio.file.InvalidPathException; -import java.nio.file.OpenOption; import java.nio.file.Path; -import java.nio.file.Paths; import java.nio.file.StandardOpenOption; import java.util.ArrayList; import java.util.Collection; @@ -217,24 +214,24 @@ public class TranslogTests extends ESTestCase { Translog.Location loc2 = translog.add(new Translog.Index("test", "2", new byte[]{2})); assertThat(loc2, greaterThan(loc1)); assertThat(translog.getLastWriteLocation(), greaterThan(loc2)); - assertThat(translog.read(loc1).getSource().source.toBytesArray(), equalTo(new BytesArray(new byte[]{1}))); - assertThat(translog.read(loc2).getSource().source.toBytesArray(), equalTo(new BytesArray(new byte[]{2}))); + assertThat(translog.read(loc1).getSource().source, equalTo(new BytesArray(new byte[]{1}))); + assertThat(translog.read(loc2).getSource().source, equalTo(new BytesArray(new byte[]{2}))); Translog.Location lastLocBeforeSync = translog.getLastWriteLocation(); translog.sync(); assertEquals(lastLocBeforeSync, translog.getLastWriteLocation()); - assertThat(translog.read(loc1).getSource().source.toBytesArray(), equalTo(new BytesArray(new byte[]{1}))); - assertThat(translog.read(loc2).getSource().source.toBytesArray(), equalTo(new BytesArray(new byte[]{2}))); + assertThat(translog.read(loc1).getSource().source, equalTo(new BytesArray(new byte[]{1}))); + assertThat(translog.read(loc2).getSource().source, equalTo(new BytesArray(new byte[]{2}))); Translog.Location loc3 = translog.add(new Translog.Index("test", "2", new byte[]{3})); assertThat(loc3, greaterThan(loc2)); assertThat(translog.getLastWriteLocation(), greaterThan(loc3)); - assertThat(translog.read(loc3).getSource().source.toBytesArray(), equalTo(new BytesArray(new byte[]{3}))); + assertThat(translog.read(loc3).getSource().source, equalTo(new BytesArray(new byte[]{3}))); lastLocBeforeSync = translog.getLastWriteLocation(); translog.sync(); assertEquals(lastLocBeforeSync, translog.getLastWriteLocation()); - assertThat(translog.read(loc3).getSource().source.toBytesArray(), equalTo(new 
BytesArray(new byte[]{3}))); + assertThat(translog.read(loc3).getSource().source, equalTo(new BytesArray(new byte[]{3}))); translog.prepareCommit(); /* * The commit adds to the lastWriteLocation even though it isn't really a write. This is just an implementation artifact but it can * and less than the location of the next write operation. */ assertThat(translog.getLastWriteLocation(), greaterThan(lastLocBeforeSync)); - assertThat(translog.read(loc3).getSource().source.toBytesArray(), equalTo(new BytesArray(new byte[]{3}))); + assertThat(translog.read(loc3).getSource().source, equalTo(new BytesArray(new byte[]{3}))); translog.commit(); assertNull(translog.read(loc1)); assertNull(translog.read(loc2)); @@ -274,7 +271,7 @@ public class TranslogTests extends ESTestCase { Translog.Index index = (Translog.Index) snapshot.next(); assertThat(index != null, equalTo(true)); - assertThat(index.source().toBytes(), equalTo(new byte[]{1})); + assertThat(BytesReference.toBytes(index.source()), equalTo(new byte[]{1})); Translog.Delete delete = (Translog.Delete) snapshot.next(); assertThat(delete != null, equalTo(true)); @@ -303,7 +300,7 @@ public class TranslogTests extends ESTestCase { if (randomBoolean()) { BytesStreamOutput out = new BytesStreamOutput(); stats.writeTo(out); - StreamInput in = StreamInput.wrap(out.bytes()); + StreamInput in = out.bytes().streamInput(); stats = new TranslogStats(); stats.readFrom(in); } @@ -350,7 +347,7 @@ public class TranslogTests extends ESTestCase { BytesStreamOutput out = new BytesStreamOutput(); total.writeTo(out); TranslogStats copy = new TranslogStats(); - copy.readFrom(StreamInput.wrap(out.bytes())); + copy.readFrom(out.bytes().streamInput()); assertEquals(6, copy.estimatedNumberOfOperations()); assertEquals(431, copy.getTranslogSizeInBytes()); @@ -458,7 +455,7 @@ public class TranslogTests extends ESTestCase { final BlockingQueue<LocationOperation> writtenOperations = new ArrayBlockingQueue<>(threadCount * opsPerThread); Thread[] threads = new Thread[threadCount]; - final Throwable[] threadExceptions = new Throwable[threadCount]; + final Exception[] threadExceptions = new Exception[threadCount]; final CountDownLatch downLatch = new CountDownLatch(1); for (int i = 0; i < threadCount; i++) { final int threadId = i; @@ -626,7 +623,7 @@ public class TranslogTests extends ESTestCase { final AtomicBoolean run = new AtomicBoolean(true); // any errors on threads - final List<Throwable> errors = new CopyOnWriteArrayList<>(); + final List<Exception> errors = new CopyOnWriteArrayList<>(); logger.debug("using [{}] readers. [{}] writers.
flushing every ~[{}] ops.", readers.length, writers.length, flushEveryOps); for (int i = 0; i < writers.length; i++) { final String threadName = "writer_" + i; @@ -665,9 +662,9 @@ } @Override - public void onFailure(Throwable t) { - logger.error("--> writer [{}] had an error", t, threadName); - errors.add(t); + public void onFailure(Exception e) { + logger.error("--> writer [{}] had an error", e, threadName); + errors.add(e); } }, threadName); writers[i].start(); @@ -680,14 +677,14 @@ Set<Translog.Operation> writtenOpsAtView; @Override - public void onFailure(Throwable t) { - logger.error("--> reader [{}] had an error", t, threadId); - errors.add(t); + public void onFailure(Exception e) { + logger.error("--> reader [{}] had an error", e, threadId); + errors.add(e); try { closeView(); - } catch (IOException e) { - logger.error("unexpected error while closing view, after failure"); - t.addSuppressed(e); + } catch (IOException inner) { + inner.addSuppressed(e); + logger.error("unexpected error while closing view, after failure", inner); } } @@ -827,7 +824,7 @@ public class TranslogTests extends ESTestCase { assertEquals(max.generation, translog.currentFileGeneration()); final Translog.Operation read = translog.read(max); - assertEquals(read.getSource().source.toUtf8(), Integer.toString(count)); + assertEquals(read.getSource().source.utf8ToString(), Integer.toString(count)); } public static Translog.Location max(Translog.Location a, Translog.Location b) { @@ -859,7 +856,7 @@ public class TranslogTests extends ESTestCase { Translog.Location location = locations.get(op); if (op <= lastSynced) { final Translog.Operation read = reader.read(location); - assertEquals(Integer.toString(op), read.getSource().source.toUtf8()); + assertEquals(Integer.toString(op), read.getSource().source.utf8ToString()); } else { try { reader.read(location); @@ -995,7 +992,7 @@ public class TranslogTests extends ESTestCase { assertEquals("expected operation" + i + " to be in the previous translog but wasn't", translog.currentFileGeneration() - 1, locations.get(i).generation); Translog.Operation next = snapshot.next(); assertNotNull("operation " + i + " must be non-null", next); - assertEquals(i, Integer.parseInt(next.getSource().source.toUtf8())); + assertEquals(i, Integer.parseInt(next.getSource().source.utf8ToString())); } } } @@ -1030,7 +1027,7 @@ public class TranslogTests extends ESTestCase { for (int i = 0; i < upTo; i++) { Translog.Operation next = snapshot.next(); assertNotNull("operation " + i + " must be non-null synced: " + sync, next); - assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.toUtf8())); + assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.utf8ToString())); } } if (randomBoolean()) { // recover twice @@ -1043,7 +1040,7 @@ public class TranslogTests extends ESTestCase { for (int i = 0; i < upTo; i++) { Translog.Operation next = snapshot.next(); assertNotNull("operation " + i + " must be non-null synced: " + sync, next); - assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.toUtf8())); + assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.utf8ToString())); } } } @@ -1084,7 +1081,7 @@ public class TranslogTests extends ESTestCase { for (int i = 0; i < upTo; i++) { Translog.Operation next = snapshot.next(); assertNotNull("operation " + i + " must be
non-null synced: " + sync, next); - assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.toUtf8())); + assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.utf8ToString())); } } @@ -1099,7 +1096,7 @@ public class TranslogTests extends ESTestCase { for (int i = 0; i < upTo; i++) { Translog.Operation next = snapshot.next(); assertNotNull("operation " + i + " must be non-null synced: " + sync, next); - assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.toUtf8())); + assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.utf8ToString())); } } } @@ -1143,7 +1140,7 @@ public class TranslogTests extends ESTestCase { for (int i = 0; i < upTo; i++) { Translog.Operation next = snapshot.next(); assertNotNull("operation " + i + " must be non-null synced: " + sync, next); - assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.toUtf8())); + assertEquals("payload missmatch, synced: " + sync, i, Integer.parseInt(next.getSource().source.utf8ToString())); } } } @@ -1157,7 +1154,7 @@ public class TranslogTests extends ESTestCase { ops.add(test); } Translog.writeOperations(out, ops); - final List<Translog.Operation> readOperations = Translog.readOperations(StreamInput.wrap(out.bytes())); + final List<Translog.Operation> readOperations = Translog.readOperations(out.bytes().streamInput()); assertEquals(ops.size(), readOperations.size()); assertEquals(ops, readOperations); } @@ -1218,7 +1215,7 @@ public class TranslogTests extends ESTestCase { for (int i = firstUncommitted; i < translogOperations; i++) { Translog.Operation next = snapshot.next(); assertNotNull("" + i, next); - assertEquals(Integer.parseInt(next.getSource().source.toUtf8()), i); + assertEquals(Integer.parseInt(next.getSource().source.utf8ToString()), i); } assertNull(snapshot.next()); } @@ -1242,7 +1239,7 @@ public class TranslogTests extends ESTestCase { final BlockingQueue<LocationOperation> writtenOperations = new ArrayBlockingQueue<>(threadCount * opsPerThread); Thread[] threads = new Thread[threadCount]; - final Throwable[] threadExceptions = new Throwable[threadCount]; + final Exception[] threadExceptions = new Exception[threadCount]; final CountDownLatch downLatch = new CountDownLatch(1); for (int i = 0; i < threadCount; i++) { final int threadId = i; @@ -1269,10 +1266,10 @@ public class TranslogTests extends ESTestCase { private final int opsPerThread; private final int threadId; private final Collection<LocationOperation> writtenOperations; - private final Throwable[] threadExceptions; + private final Exception[] threadExceptions; private final Translog translog; - public TranslogThread(Translog translog, CountDownLatch downLatch, int opsPerThread, int threadId, Collection<LocationOperation> writtenOperations, Throwable[] threadExceptions) { + public TranslogThread(Translog translog, CountDownLatch downLatch, int opsPerThread, int threadId, Collection<LocationOperation> writtenOperations, Exception[] threadExceptions) { this.translog = translog; this.downLatch = downLatch; this.opsPerThread = opsPerThread; @@ -1306,7 +1303,7 @@ public class TranslogTests extends ESTestCase { writtenOperations.add(new LocationOperation(op, loc)); afterAdd(); } - } catch (Throwable t) { + } catch (Exception t) { threadExceptions[threadId] = t; } } @@ -1392,7 +1389,7 @@ public class TranslogTests extends ESTestCase { assertEquals("expected operation" + i + " to be in the previous translog but wasn't", tlog.currentFileGeneration() - 1, locations.get(i).generation);
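The conversions in these translog hunks all follow one BytesReference migration: the toUtf8/toBytes/toBytesArray accessors and StreamInput.wrap are replaced by instance methods and a static helper. A compact old-to-new mapping, assuming only the methods exercised in the hunks above:

    BytesReference ref = out.bytes();
    String text = ref.utf8ToString();           // was: ref.toUtf8()
    byte[] copy = BytesReference.toBytes(ref);  // was: ref.toBytes()
    StreamInput in = ref.streamInput();         // was: StreamInput.wrap(ref)
    BytesRef raw = ref.toBytesRef();            // view of the (possibly shared) backing array
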
Translog.Operation next = snapshot.next(); assertNotNull("operation " + i + " must be non-null", next); - assertEquals(i, Integer.parseInt(next.getSource().source.toUtf8())); + assertEquals(i, Integer.parseInt(next.getSource().source.utf8ToString())); } } } @@ -1448,7 +1445,7 @@ public class TranslogTests extends ESTestCase { final int threadCount = randomIntBetween(1, 5); Thread[] threads = new Thread[threadCount]; - final Throwable[] threadExceptions = new Throwable[threadCount]; + final Exception[] threadExceptions = new Exception[threadCount]; final CountDownLatch downLatch = new CountDownLatch(1); final CountDownLatch added = new CountDownLatch(randomIntBetween(10, 100)); List<LocationOperation> writtenOperations = Collections.synchronizedList(new ArrayList<>()); @@ -1716,7 +1713,7 @@ public class TranslogTests extends ESTestCase { for (int i = 0; i < 1; i++) { Translog.Operation next = snapshot.next(); assertNotNull("operation " + i + " must be non-null", next); - assertEquals("payload missmatch", i, Integer.parseInt(next.getSource().source.toUtf8())); + assertEquals("payload missmatch", i, Integer.parseInt(next.getSource().source.utf8ToString())); } tlog.add(new Translog.Index("test", "" + 1, Integer.toString(1).getBytes(Charset.forName("UTF-8")))); } @@ -1727,7 +1724,7 @@ public class TranslogTests extends ESTestCase { for (int i = 0; i < 2; i++) { Translog.Operation next = snapshot.next(); assertNotNull("operation " + i + " must be non-null", next); - assertEquals("payload missmatch", i, Integer.parseInt(next.getSource().source.toUtf8())); + assertEquals("payload missmatch", i, Integer.parseInt(next.getSource().source.utf8ToString())); } } } @@ -1771,7 +1768,7 @@ public class TranslogTests extends ESTestCase { for (int i = 0; i < 1; i++) { Translog.Operation next = snapshot.next(); assertNotNull("operation " + i + " must be non-null", next); - assertEquals("payload missmatch", i, Integer.parseInt(next.getSource().source.toUtf8())); + assertEquals("payload missmatch", i, Integer.parseInt(next.getSource().source.utf8ToString())); } tlog.add(new Translog.Index("test", "" + 1, Integer.toString(1).getBytes(Charset.forName("UTF-8")))); } @@ -1870,7 +1867,7 @@ public class TranslogTests extends ESTestCase { assertEquals(syncedDocs.size(), snapshot.totalOperations()); for (int i = 0; i < syncedDocs.size(); i++) { Translog.Operation next = snapshot.next(); - assertEquals(syncedDocs.get(i), next.getSource().source.toUtf8()); + assertEquals(syncedDocs.get(i), next.getSource().source.utf8ToString()); assertNotNull("operation " + i + " must be non-null", next); } } diff --git a/core/src/test/java/org/elasticsearch/indexing/IndexActionIT.java b/core/src/test/java/org/elasticsearch/indexing/IndexActionIT.java index 23925f574ff..eb5ae3b1dbd 100644 --- a/core/src/test/java/org/elasticsearch/indexing/IndexActionIT.java +++ b/core/src/test/java/org/elasticsearch/indexing/IndexActionIT.java @@ -50,7 +50,7 @@ public class IndexActionIT extends ESIntegTestCase { public void testAutoGenerateIdNoDuplicates() throws Exception { int numberOfIterations = scaledRandomIntBetween(10, 50); for (int i = 0; i < numberOfIterations; i++) { - Throwable firstError = null; + Exception firstError = null; createIndex("test"); int numOfDocs = randomIntBetween(10, 100); logger.info("indexing [{}] docs", numOfDocs); @@ -59,26 +59,25 @@ public class IndexActionIT extends ESIntegTestCase { builders.add(client().prepareIndex("test", "type").setSource("field", "value")); } indexRandom(true, builders); - ensureYellow("test");
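The firstError bookkeeping that starts here shows the Throwable-to-Exception narrowing applied across all of these tests: catching Exception still records genuine failures, while Errors (out-of-memory, assertion errors) now propagate and fail the run outright. The shape of the pattern, with doCheck() as a hypothetical stand-in for the verification calls of the real test:

    Exception firstError = null;
    try {
        doCheck();               // hypothetical placeholder for the search + assert calls
    } catch (Exception e) {      // was: catch (Throwable t)
        logger.error("check failed", e);
        if (firstError == null) {
            firstError = e;      // keep the first failure to rethrow at the end
        }
    }
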
logger.info("verifying indexed content"); int numOfChecks = randomIntBetween(8, 12); for (int j = 0; j < numOfChecks; j++) { try { logger.debug("running search with all types"); assertHitCount(client().prepareSearch("test").get(), numOfDocs); - } catch (Throwable t) { - logger.error("search for all docs types failed", t); + } catch (Exception e) { + logger.error("search for all docs types failed", e); if (firstError == null) { - firstError = t; + firstError = e; } } try { logger.debug("running search with a specific type"); assertHitCount(client().prepareSearch("test").setTypes("type").get(), numOfDocs); - } catch (Throwable t) { - logger.error("search for all docs of a specific type failed", t); + } catch (Exception e) { + logger.error("search for all docs of a specific type failed", e); if (firstError == null) { - firstError = t; + firstError = e; } } } diff --git a/core/src/test/java/org/elasticsearch/indexlifecycle/IndexLifecycleActionIT.java b/core/src/test/java/org/elasticsearch/indexlifecycle/IndexLifecycleActionIT.java index 207581022ba..f5bf152b598 100644 --- a/core/src/test/java/org/elasticsearch/indexlifecycle/IndexLifecycleActionIT.java +++ b/core/src/test/java/org/elasticsearch/indexlifecycle/IndexLifecycleActionIT.java @@ -26,7 +26,6 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.cluster.routing.RoutingNodes; -import org.elasticsearch.common.Priority; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.test.ESIntegTestCase; @@ -45,6 +44,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING; import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; @@ -71,13 +71,7 @@ public class IndexLifecycleActionIT extends ESIntegTestCase { logger.info("Creating index [test]"); CreateIndexResponse createIndexResponse = client().admin().indices().create(createIndexRequest("test").settings(settings)).actionGet(); - assertThat(createIndexResponse.isAcknowledged(), equalTo(true)); - - logger.info("Running Cluster Health"); - ClusterHealthResponse clusterHealth = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForYellowStatus().execute().actionGet(); - logger.info("Done Cluster Health, status {}", clusterHealth.getStatus()); - assertThat(clusterHealth.isTimedOut(), equalTo(false)); - assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.YELLOW)); + assertAcked(createIndexResponse); ClusterState clusterState = client().admin().cluster().prepareState().get().getState(); RoutingNode routingNodeEntry1 = clusterState.getRoutingNodes().node(node1); @@ -88,8 +82,9 @@ public class IndexLifecycleActionIT extends ESIntegTestCase { String server_2 = internalCluster().startNode(); // first wait for 2 nodes in the cluster - logger.info("Running Cluster Health"); - clusterHealth = client().admin().cluster().health(clusterHealthRequest().waitForGreenStatus().waitForNodes("2")).actionGet(); + logger.info("Waiting for replicas 
to be assigned"); + ClusterHealthResponse clusterHealth = + client().admin().cluster().health(clusterHealthRequest().waitForGreenStatus().waitForNodes("2")).actionGet(); logger.info("Done Cluster Health, status {}", clusterHealth.getStatus()); assertThat(clusterHealth.isTimedOut(), equalTo(false)); assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.GREEN)); @@ -123,6 +118,7 @@ public class IndexLifecycleActionIT extends ESIntegTestCase { String server_3 = internalCluster().startNode(); // first wait for 3 nodes in the cluster + logger.info("Waiting for replicas to be assigned"); clusterHealth = client().admin().cluster().health(clusterHealthRequest().waitForGreenStatus().waitForNodes("3")).actionGet(); assertThat(clusterHealth.isTimedOut(), equalTo(false)); assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.GREEN)); diff --git a/core/src/test/java/org/elasticsearch/indices/IndexingMemoryControllerTests.java b/core/src/test/java/org/elasticsearch/indices/IndexingMemoryControllerTests.java index 1f1b758f349..b7eb833106e 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndexingMemoryControllerTests.java +++ b/core/src/test/java/org/elasticsearch/indices/IndexingMemoryControllerTests.java @@ -22,9 +22,8 @@ import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.ShardRouting; -import org.elasticsearch.cluster.routing.ShardRoutingHelper; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.index.IndexService; @@ -35,6 +34,7 @@ import org.elasticsearch.index.shard.IndexShardTests; import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.threadpool.ThreadPool.Cancellable; import java.io.IOException; import java.util.ArrayList; @@ -44,7 +44,6 @@ import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; -import java.util.concurrent.ScheduledFuture; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; @@ -70,8 +69,7 @@ public class IndexingMemoryControllerTests extends ESSingleNodeTestCase { super(Settings.builder() .put("indices.memory.interval", "200h") // disable it .put(settings) - .build(), - null, null, 100 * 1024 * 1024); // fix jvm mem size to 100mb + .build(), null, null); } public void deleteShard(IndexShard shard) { @@ -162,7 +160,7 @@ public class IndexingMemoryControllerTests extends ESSingleNodeTestCase { } @Override - protected ScheduledFuture scheduleTask(ThreadPool threadPool) { + protected Cancellable scheduleTask(ThreadPool threadPool) { return null; } } @@ -392,7 +390,7 @@ public class IndexingMemoryControllerTests extends ESSingleNodeTestCase { } @Override - protected ScheduledFuture scheduleTask(ThreadPool threadPool) { + protected Cancellable scheduleTask(ThreadPool threadPool) { return null; } }; @@ -449,7 +447,7 @@ public class IndexingMemoryControllerTests extends ESSingleNodeTestCase { try { assertEquals(0, imc.availableShards().size()); ShardRouting routing = newShard.routingEntry(); - DiscoveryNode localNode = new 
DiscoveryNode("foo", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + DiscoveryNode localNode = new DiscoveryNode("foo", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); newShard.markAsRecovering("store", new RecoveryState(newShard.shardId(), routing.primary(), RecoveryState.Type.STORE, localNode, localNode)); assertEquals(1, imc.availableShards().size()); diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerSingleNodeTests.java b/core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerSingleNodeTests.java index 92a411a95de..17a4b93c240 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerSingleNodeTests.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesLifecycleListenerSingleNodeTests.java @@ -25,7 +25,7 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingHelper; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.NodeServicesProvider; @@ -103,7 +103,7 @@ public class IndicesLifecycleListenerSingleNodeTests extends ESSingleNodeTestCas newRouting = ShardRoutingHelper.initialize(newRouting, nodeId); IndexShard shard = index.createShard(newRouting); shard.updateRoutingEntry(newRouting); - final DiscoveryNode localNode = new DiscoveryNode("foo", DummyTransportAddress.INSTANCE, + final DiscoveryNode localNode = new DiscoveryNode("foo", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT); shard.markAsRecovering("store", new RecoveryState(shard.shardId(), newRouting.primary(), RecoveryState.Type.SNAPSHOT, newRouting.restoreSource(), localNode)); shard.recoverFromStore(); diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java index 52bb09d38b4..f1587081126 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java @@ -74,7 +74,6 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { public void testSpecifiedIndexUnavailableMultipleIndices() throws Exception { assertAcked(prepareCreate("test1")); - ensureYellow(); // Verify defaults verify(search("test1", "test2"), true); @@ -129,7 +128,6 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { options = IndicesOptions.strictExpandOpen(); assertAcked(prepareCreate("test2")); - ensureYellow(); verify(search("test1", "test2").setIndicesOptions(options), false); verify(msearch(options, "test1", "test2").setIndicesOptions(options), false); verify(clearCache("test1", "test2").setIndicesOptions(options), false); @@ -247,7 +245,6 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(getSettings("test1").setIndicesOptions(options), false); assertAcked(prepareCreate("test1")); - ensureYellow(); options = IndicesOptions.strictExpandOpenAndForbidClosed(); verify(search("test1").setIndicesOptions(options), false); @@ -447,7 +444,6 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { public void testAllMissingStrict() throws Exception { 
createIndex("test1"); - ensureYellow(); try { client().prepareSearch("test2") .setQuery(matchAllQuery()) @@ -503,7 +499,6 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { public void testDeleteIndex() throws Exception { createIndex("foobar"); - ensureYellow(); verify(client().admin().indices().prepareDelete("foo"), true); assertThat(client().admin().indices().prepareExists("foobar").get().isExists(), equalTo(true)); @@ -515,7 +510,6 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(client().admin().indices().prepareDelete("_all"), false); createIndex("foo", "foobar", "bar", "barbaz"); - ensureYellow(); verify(client().admin().indices().prepareDelete("foo*"), false); assertThat(client().admin().indices().prepareExists("foo").get().isExists(), equalTo(false)); @@ -534,7 +528,6 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { public void testPutAlias() throws Exception { createIndex("foobar"); - ensureYellow(); verify(client().admin().indices().prepareAliases().addAlias("foobar", "foobar_alias"), false); assertThat(client().admin().indices().prepareAliasesExist("foobar_alias").setIndices("foobar").get().exists(), equalTo(true)); @@ -542,7 +535,6 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { public void testPutAliasWildcard() throws Exception { createIndex("foo", "foobar", "bar", "barbaz"); - ensureYellow(); verify(client().admin().indices().prepareAliases().addAlias("foo*", "foobar_alias"), false); assertThat(client().admin().indices().prepareAliasesExist("foobar_alias").setIndices("foo").get().exists(), equalTo(true)); @@ -563,7 +555,6 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase { verify(client().admin().indices().preparePutMapping("_all").setType("type1").setSource("field", "type=text"), true); createIndex("foo", "foobar", "bar", "barbaz"); - ensureYellow(); verify(client().admin().indices().preparePutMapping("foo").setType("type1").setSource("field", "type=text"), false); assertThat(client().admin().indices().prepareGetMappings("foo").get().mappings().get("foo").get("type1"), notNullValue()); diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesRequestCacheIT.java b/core/src/test/java/org/elasticsearch/indices/IndicesRequestCacheIT.java index fdd7e4c2d67..7b9f2dbb7cf 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesRequestCacheIT.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesRequestCacheIT.java @@ -354,4 +354,67 @@ public class IndicesRequestCacheIT extends ESIntegTestCase { equalTo(0L)); } + public void testCanCache() throws Exception { + assertAcked(client().admin().indices().prepareCreate("index").addMapping("type", "s", "type=date") + .setSettings(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true, IndexMetaData.SETTING_NUMBER_OF_SHARDS, + 5, IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) + .get()); + indexRandom(true, client().prepareIndex("index", "type", "1").setRouting("1").setSource("s", "2016-03-19"), + client().prepareIndex("index", "type", "2").setRouting("1").setSource("s", "2016-03-20"), + client().prepareIndex("index", "type", "3").setRouting("1").setSource("s", "2016-03-21"), + client().prepareIndex("index", "type", "4").setRouting("2").setSource("s", "2016-03-22"), + client().prepareIndex("index", "type", "5").setRouting("2").setSource("s", "2016-03-23"), + client().prepareIndex("index", "type", "6").setRouting("2").setSource("s", "2016-03-24"), + client().prepareIndex("index", "type", 
"7").setRouting("3").setSource("s", "2016-03-25"), + client().prepareIndex("index", "type", "8").setRouting("3").setSource("s", "2016-03-26"), + client().prepareIndex("index", "type", "9").setRouting("3").setSource("s", "2016-03-27")); + ensureSearchable("index"); + + assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), + equalTo(0L)); + assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(), + equalTo(0L)); + + // If size > 0 we should no cache by default + final SearchResponse r1 = client().prepareSearch("index").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(1) + .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-19").lte("2016-03-25")).get(); + assertSearchResponse(r1); + assertThat(r1.getHits().getTotalHits(), equalTo(7L)); + assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), + equalTo(0L)); + assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(), + equalTo(0L)); + + // If search type is DFS_QUERY_THEN_FETCH we should not cache + final SearchResponse r2 = client().prepareSearch("index").setSearchType(SearchType.DFS_QUERY_THEN_FETCH).setSize(0) + .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-20").lte("2016-03-26")).get(); + assertSearchResponse(r2); + assertThat(r2.getHits().getTotalHits(), equalTo(7L)); + assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), + equalTo(0L)); + assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(), + equalTo(0L)); + + // If search type is DFS_QUERY_THEN_FETCH we should not cache even if + // the cache flag is explicitly set on the request + final SearchResponse r3 = client().prepareSearch("index").setSearchType(SearchType.DFS_QUERY_THEN_FETCH).setSize(0) + .setRequestCache(true).setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-20").lte("2016-03-26")).get(); + assertSearchResponse(r3); + assertThat(r3.getHits().getTotalHits(), equalTo(7L)); + assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), + equalTo(0L)); + assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(), + equalTo(0L)); + + // If size > 1 and cache flag is set on the request we should cache + final SearchResponse r4 = client().prepareSearch("index").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(1) + .setRequestCache(true).setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-21").lte("2016-03-27")).get(); + assertSearchResponse(r4); + assertThat(r4.getHits().getTotalHits(), equalTo(7L)); + assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), + equalTo(0L)); + assertThat(client().admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(), + equalTo(5L)); + } + } diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java b/core/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java index 1cca3bb7215..d43217d9785 100644 --- 
a/core/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java @@ -62,7 +62,7 @@ public class IndicesRequestCacheTests extends ESTestCase { // initial cache TestEntity entity = new TestEntity(requestCacheStats, reader, indexShard, 0); BytesReference value = cache.getOrCompute(entity, reader, termQuery.buildAsBytes()); - assertEquals("foo", StreamInput.wrap(value).readString()); + assertEquals("foo", value.streamInput().readString()); assertEquals(0, requestCacheStats.stats().getHitCount()); assertEquals(1, requestCacheStats.stats().getMissCount()); assertEquals(0, requestCacheStats.stats().getEvictions()); @@ -72,7 +72,7 @@ public class IndicesRequestCacheTests extends ESTestCase { // cache hit entity = new TestEntity(requestCacheStats, reader, indexShard, 0); value = cache.getOrCompute(entity, reader, termQuery.buildAsBytes()); - assertEquals("foo", StreamInput.wrap(value).readString()); + assertEquals("foo", value.streamInput().readString()); assertEquals(1, requestCacheStats.stats().getHitCount()); assertEquals(1, requestCacheStats.stats().getMissCount()); assertEquals(0, requestCacheStats.stats().getEvictions()); @@ -117,7 +117,7 @@ public class IndicesRequestCacheTests extends ESTestCase { // initial cache TestEntity entity = new TestEntity(requestCacheStats, reader, indexShard, 0); BytesReference value = cache.getOrCompute(entity, reader, termQuery.buildAsBytes()); - assertEquals("foo", StreamInput.wrap(value).readString()); + assertEquals("foo", value.streamInput().readString()); assertEquals(0, requestCacheStats.stats().getHitCount()); assertEquals(1, requestCacheStats.stats().getMissCount()); assertEquals(0, requestCacheStats.stats().getEvictions()); @@ -130,7 +130,7 @@ public class IndicesRequestCacheTests extends ESTestCase { // cache the second TestEntity secondEntity = new TestEntity(requestCacheStats, secondReader, indexShard, 0); value = cache.getOrCompute(secondEntity, secondReader, termQuery.buildAsBytes()); - assertEquals("bar", StreamInput.wrap(value).readString()); + assertEquals("bar", value.streamInput().readString()); assertEquals(0, requestCacheStats.stats().getHitCount()); assertEquals(2, requestCacheStats.stats().getMissCount()); assertEquals(0, requestCacheStats.stats().getEvictions()); @@ -141,7 +141,7 @@ public class IndicesRequestCacheTests extends ESTestCase { secondEntity = new TestEntity(requestCacheStats, secondReader, indexShard, 0); value = cache.getOrCompute(secondEntity, secondReader, termQuery.buildAsBytes()); - assertEquals("bar", StreamInput.wrap(value).readString()); + assertEquals("bar", value.streamInput().readString()); assertEquals(1, requestCacheStats.stats().getHitCount()); assertEquals(2, requestCacheStats.stats().getMissCount()); assertEquals(0, requestCacheStats.stats().getEvictions()); @@ -150,7 +150,7 @@ public class IndicesRequestCacheTests extends ESTestCase { entity = new TestEntity(requestCacheStats, reader, indexShard, 0); value = cache.getOrCompute(entity, reader, termQuery.buildAsBytes()); - assertEquals("foo", StreamInput.wrap(value).readString()); + assertEquals("foo", value.streamInput().readString()); assertEquals(2, requestCacheStats.stats().getHitCount()); assertEquals(2, requestCacheStats.stats().getMissCount()); assertEquals(0, requestCacheStats.stats().getEvictions()); @@ -209,9 +209,9 @@ public class IndicesRequestCacheTests extends ESTestCase { TestEntity secondEntity = new TestEntity(requestCacheStats, secondReader, 
indexShard, 0); BytesReference value1 = cache.getOrCompute(entity, reader, termQuery.buildAsBytes()); - assertEquals("foo", StreamInput.wrap(value1).readString()); + assertEquals("foo", value1.streamInput().readString()); BytesReference value2 = cache.getOrCompute(secondEntity, secondReader, termQuery.buildAsBytes()); - assertEquals("bar", StreamInput.wrap(value2).readString()); + assertEquals("bar", value2.streamInput().readString()); size = requestCacheStats.stats().getMemorySize(); IOUtils.close(reader, secondReader, writer, dir, cache); } @@ -240,12 +240,12 @@ public class IndicesRequestCacheTests extends ESTestCase { TestEntity thirddEntity = new TestEntity(requestCacheStats, thirdReader, indexShard, 0); BytesReference value1 = cache.getOrCompute(entity, reader, termQuery.buildAsBytes()); - assertEquals("foo", StreamInput.wrap(value1).readString()); + assertEquals("foo", value1.streamInput().readString()); BytesReference value2 = cache.getOrCompute(secondEntity, secondReader, termQuery.buildAsBytes()); - assertEquals("bar", StreamInput.wrap(value2).readString()); + assertEquals("bar", value2.streamInput().readString()); logger.info("Memory size: {}", requestCacheStats.stats().getMemorySize()); BytesReference value3 = cache.getOrCompute(thirddEntity, thirdReader, termQuery.buildAsBytes()); - assertEquals("baz", StreamInput.wrap(value3).readString()); + assertEquals("baz", value3.streamInput().readString()); assertEquals(2, cache.count()); assertEquals(1, requestCacheStats.stats().getEvictions()); IOUtils.close(reader, secondReader, thirdReader, writer, dir, cache); @@ -277,12 +277,12 @@ public class IndicesRequestCacheTests extends ESTestCase { TestEntity thirddEntity = new TestEntity(requestCacheStats, thirdReader, differentIdentity, 0); BytesReference value1 = cache.getOrCompute(entity, reader, termQuery.buildAsBytes()); - assertEquals("foo", StreamInput.wrap(value1).readString()); + assertEquals("foo", value1.streamInput().readString()); BytesReference value2 = cache.getOrCompute(secondEntity, secondReader, termQuery.buildAsBytes()); - assertEquals("bar", StreamInput.wrap(value2).readString()); + assertEquals("bar", value2.streamInput().readString()); logger.info("Memory size: {}", requestCacheStats.stats().getMemorySize()); BytesReference value3 = cache.getOrCompute(thirddEntity, thirdReader, termQuery.buildAsBytes()); - assertEquals("baz", StreamInput.wrap(value3).readString()); + assertEquals("baz", value3.streamInput().readString()); assertEquals(3, cache.count()); final long hitCount = requestCacheStats.stats().getHitCount(); // clear all for the indexShard identity even though it's still open @@ -292,7 +292,7 @@ public class IndicesRequestCacheTests extends ESTestCase { // third has not been validated since it's a different identity value3 = cache.getOrCompute(thirddEntity, thirdReader, termQuery.buildAsBytes()); assertEquals(hitCount + 1, requestCacheStats.stats().getHitCount()); - assertEquals("baz", StreamInput.wrap(value3).readString()); + assertEquals("baz", value3.streamInput().readString()); IOUtils.close(reader, secondReader, thirdReader, writer, dir, cache); diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java b/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java index 75c968e1d67..27bb2cccff4 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java @@ -285,9 +285,6 @@ public class IndicesServiceTests extends
ESSingleNodeTestCase { listener.latch.await(); assertThat(clusterService.state(), not(originalState)); assertNotNull(clusterService.state().getMetaData().index(alias)); - - // cleanup - indicesService.deleteIndex(test.index(), "finished with test"); } /** diff --git a/core/src/test/java/org/elasticsearch/indices/TermsLookupTests.java b/core/src/test/java/org/elasticsearch/indices/TermsLookupTests.java index 59d86ddce67..fea69133377 100644 --- a/core/src/test/java/org/elasticsearch/indices/TermsLookupTests.java +++ b/core/src/test/java/org/elasticsearch/indices/TermsLookupTests.java @@ -70,7 +70,7 @@ public class TermsLookupTests extends ESTestCase { TermsLookup termsLookup = randomTermsLookup(); try (BytesStreamOutput output = new BytesStreamOutput()) { termsLookup.writeTo(output); - try (StreamInput in = StreamInput.wrap(output.bytes())) { + try (StreamInput in = output.bytes().streamInput()) { TermsLookup deserializedLookup = new TermsLookup(in); assertEquals(deserializedLookup, termsLookup); assertEquals(deserializedLookup.hashCode(), termsLookup.hashCode()); diff --git a/core/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java b/core/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java index 869ac622b39..7ca0df72f8f 100644 --- a/core/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java +++ b/core/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java @@ -23,6 +23,7 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.ar.ArabicNormalizationFilter; +import org.apache.lucene.analysis.core.KeywordAnalyzer; import org.apache.lucene.analysis.core.WhitespaceTokenizer; import org.apache.lucene.analysis.fa.PersianNormalizationFilter; import org.apache.lucene.analysis.hunspell.Dictionary; @@ -96,7 +97,7 @@ public class AnalysisModuleTests extends ModuleTestCase { throw new RuntimeException(e); } } - + private Settings loadFromClasspath(String path) throws IOException { return Settings.builder().loadFromStream(path, getClass().getResourceAsStream(path)) .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) @@ -121,6 +122,33 @@ public class AnalysisModuleTests extends ModuleTestCase { assertTokenFilter("arabic_normalization", ArabicNormalizationFilter.class); } + public void testAnalyzerAlias() throws IOException { + Settings settings = Settings.builder() + .put("index.analysis.analyzer.foobar.alias","default") + .put("index.analysis.analyzer.foobar.type", "keyword") + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0) + .build(); + AnalysisRegistry newRegistry = getNewRegistry(settings); + AnalysisService as = getAnalysisService(newRegistry, settings); + assertThat(as.analyzer("default").analyzer(), is(instanceOf(KeywordAnalyzer.class))); + + } + + public void testDoubleAlias() throws IOException { + Settings settings = Settings.builder() + .put("index.analysis.analyzer.foobar.alias","default") + .put("index.analysis.analyzer.foobar.type", "keyword") + .put("index.analysis.analyzer.barfoo.alias","default") + .put("index.analysis.analyzer.barfoo.type","english") + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0) + .build(); + AnalysisRegistry newRegistry = getNewRegistry(settings); + String message = 
expectThrows(IllegalStateException.class, () -> getAnalysisService(newRegistry, settings)).getMessage(); + assertEquals("already registered analyzer with name: default", message); + } + public void testVersionedAnalyzers() throws Exception { String yaml = "/org/elasticsearch/index/analysis/test1.yml"; Settings settings2 = Settings.builder() diff --git a/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java b/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java index 4312dd6105e..8e63653dfad 100644 --- a/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java +++ b/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java @@ -26,6 +26,8 @@ import org.elasticsearch.test.ESIntegTestCase; import org.hamcrest.core.IsNull; import java.io.IOException; +import java.util.HashMap; +import java.util.Map; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; @@ -92,11 +94,11 @@ public class AnalyzeActionIT extends ESIntegTestCase { AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A TEST").setAnalyzer("simple").get(); assertThat(analyzeResponse.getTokens().size(), equalTo(4)); - analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A TEST").setTokenizer("keyword").setTokenFilters("lowercase").get(); + analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A TEST").setTokenizer("keyword").addTokenFilter("lowercase").get(); assertThat(analyzeResponse.getTokens().size(), equalTo(1)); assertThat(analyzeResponse.getTokens().get(0).getTerm(), equalTo("this is a test")); - analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A TEST").setTokenizer("standard").setTokenFilters("lowercase", "reverse").get(); + analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A TEST").setTokenizer("standard").addTokenFilter("lowercase").addTokenFilter("reverse").get(); assertThat(analyzeResponse.getTokens().size(), equalTo(4)); AnalyzeResponse.AnalyzeToken token = analyzeResponse.getTokens().get(0); assertThat(token.getTerm(), equalTo("siht")); @@ -107,7 +109,7 @@ public class AnalyzeActionIT extends ESIntegTestCase { token = analyzeResponse.getTokens().get(3); assertThat(token.getTerm(), equalTo("tset")); - analyzeResponse = client().admin().indices().prepareAnalyze("of course").setTokenizer("standard").setTokenFilters("stop").get(); + analyzeResponse = client().admin().indices().prepareAnalyze("of course").setTokenizer("standard").addTokenFilter("stop").get(); assertThat(analyzeResponse.getTokens().size(), equalTo(1)); assertThat(analyzeResponse.getTokens().get(0).getTerm(), equalTo("course")); assertThat(analyzeResponse.getTokens().get(0).getPosition(), equalTo(1)); @@ -125,18 +127,18 @@ public class AnalyzeActionIT extends ESIntegTestCase { .putArray("index.analysis.analyzer.custom_with_char_filter.char_filter", "custom_mapping"))); ensureGreen(); - AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze("<h2>THIS IS A</h2> <b>TEST</b>").setTokenizer("standard").setCharFilters("html_strip").get(); + AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze("<h2>THIS IS A</h2> <b>TEST</b>").setTokenizer("standard").addCharFilter("html_strip").get(); assertThat(analyzeResponse.getTokens().size(), equalTo(4)); - analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A TEST").setTokenizer("keyword").setTokenFilters("lowercase").setCharFilters("html_strip").get(); + analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A TEST").setTokenizer("keyword").addTokenFilter("lowercase").addCharFilter("html_strip").get(); assertThat(analyzeResponse.getTokens().size(), equalTo(1)); assertThat(analyzeResponse.getTokens().get(0).getTerm(), equalTo("this is a test")); - analyzeResponse = client().admin().indices().prepareAnalyze(indexOrAlias(), "jeff quit phish").setTokenizer("keyword").setTokenFilters("lowercase").setCharFilters("custom_mapping").get(); + analyzeResponse = client().admin().indices().prepareAnalyze(indexOrAlias(), "jeff quit phish").setTokenizer("keyword").addTokenFilter("lowercase").addCharFilter("custom_mapping").get(); assertThat(analyzeResponse.getTokens().size(), equalTo(1)); assertThat(analyzeResponse.getTokens().get(0).getTerm(), equalTo("jeff qit fish")); - analyzeResponse = client().admin().indices().prepareAnalyze(indexOrAlias(), "jeff quit fish").setTokenizer("standard").setCharFilters("html_strip", "custom_mapping").get(); + analyzeResponse = client().admin().indices().prepareAnalyze(indexOrAlias(), "jeff quit fish").setTokenizer("standard").addCharFilter("html_strip").addCharFilter("custom_mapping").get(); assertThat(analyzeResponse.getTokens().size(), equalTo(3)); AnalyzeResponse.AnalyzeToken token = analyzeResponse.getTokens().get(0); assertThat(token.getTerm(), equalTo("jeff")); @@ -233,11 +235,10 @@ public class AnalyzeActionIT extends ESIntegTestCase { for (int i = 0; i < 10; i++) { AnalyzeResponse analyzeResponse = admin().indices().prepareAnalyze().setIndex(indexOrAlias()).setText("THIS IS A PHISH") - .setExplain(true).setCharFilters("my_mapping").setTokenizer("keyword").setTokenFilters("lowercase").get(); + .setExplain(true).addCharFilter("my_mapping").setTokenizer("keyword").addTokenFilter("lowercase").get(); assertThat(analyzeResponse.detail().analyzer(), IsNull.nullValue()); //charfilters - // global charfilter is not change text.
assertThat(analyzeResponse.detail().charfilters().length, equalTo(1)); assertThat(analyzeResponse.detail().charfilters()[0].getName(), equalTo("html_strip")); assertThat(analyzeResponse.detail().charfilters()[0].getTexts().length, equalTo(1)); @@ -305,7 +305,7 @@ public class AnalyzeActionIT extends ESIntegTestCase { //check other attributes analyzeResponse = client().admin().indices().prepareAnalyze("This is troubled") - .setExplain(true).setTokenizer("standard").setTokenFilters("snowball").get(); + .setExplain(true).setTokenizer("standard").addTokenFilter("snowball").get(); assertThat(analyzeResponse.detail().tokenfilters().length, equalTo(1)); assertThat(analyzeResponse.detail().tokenfilters()[0].getName(), equalTo("snowball")); @@ -326,7 +326,7 @@ public class AnalyzeActionIT extends ESIntegTestCase { public void testDetailAnalyzeSpecifyAttributes() throws Exception { AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze("This is troubled") - .setExplain(true).setTokenizer("standard").setTokenFilters("snowball").setAttributes("keyword").get(); + .setExplain(true).setTokenizer("standard").addTokenFilter("snowball").setAttributes("keyword").get(); assertThat(analyzeResponse.detail().tokenfilters().length, equalTo(1)); assertThat(analyzeResponse.detail().tokenfilters()[0].getName(), equalTo("snowball")); @@ -456,12 +456,132 @@ public class AnalyzeActionIT extends ESIntegTestCase { .setAnalyzer("not_exist_analyzer") .get(); fail("shouldn't get here"); - } catch (Throwable t) { - assertThat(t, instanceOf(IllegalArgumentException.class)); - assertThat(t.getMessage(), startsWith("failed to find global analyzer")); + } catch (Exception e) { + assertThat(e, instanceOf(IllegalArgumentException.class)); + assertThat(e.getMessage(), startsWith("failed to find global analyzer")); } } + public void testCustomTokenFilterInRequest() throws Exception { + Map stopFilterSettings = new HashMap<>(); + stopFilterSettings.put("type", "stop"); + stopFilterSettings.put("stopwords", new String[]{"foo", "buzz"}); + AnalyzeResponse analyzeResponse = client().admin().indices() + .prepareAnalyze() + .setText("Foo buzz test") + .setTokenizer("whitespace") + .addTokenFilter("lowercase") + .addTokenFilter(stopFilterSettings) + .setExplain(true) + .get(); + + //tokenizer + assertThat(analyzeResponse.detail().tokenizer().getName(), equalTo("whitespace")); + assertThat(analyzeResponse.detail().tokenizer().getTokens().length, equalTo(3)); + assertThat(analyzeResponse.detail().tokenizer().getTokens()[0].getTerm(), equalTo("Foo")); + assertThat(analyzeResponse.detail().tokenizer().getTokens()[0].getStartOffset(), equalTo(0)); + assertThat(analyzeResponse.detail().tokenizer().getTokens()[0].getEndOffset(), equalTo(3)); + assertThat(analyzeResponse.detail().tokenizer().getTokens()[0].getPosition(), equalTo(0)); + + assertThat(analyzeResponse.detail().tokenizer().getTokens()[1].getTerm(), equalTo("buzz")); + assertThat(analyzeResponse.detail().tokenizer().getTokens()[1].getStartOffset(), equalTo(4)); + assertThat(analyzeResponse.detail().tokenizer().getTokens()[1].getEndOffset(), equalTo(8)); + assertThat(analyzeResponse.detail().tokenizer().getTokens()[1].getPosition(), equalTo(1)); + + assertThat(analyzeResponse.detail().tokenizer().getTokens()[2].getTerm(), equalTo("test")); + assertThat(analyzeResponse.detail().tokenizer().getTokens()[2].getStartOffset(), equalTo(9)); + assertThat(analyzeResponse.detail().tokenizer().getTokens()[2].getEndOffset(), equalTo(13)); + 
assertThat(analyzeResponse.detail().tokenizer().getTokens()[2].getPosition(), equalTo(2)); + + // tokenfilter(lowercase) + assertThat(analyzeResponse.detail().tokenfilters().length, equalTo(2)); + assertThat(analyzeResponse.detail().tokenfilters()[0].getName(), equalTo("lowercase")); + assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens().length, equalTo(3)); + assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens()[0].getTerm(), equalTo("foo")); + assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens()[0].getStartOffset(), equalTo(0)); + assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens()[0].getEndOffset(), equalTo(3)); + assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens()[0].getPosition(), equalTo(0)); + + assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens()[1].getTerm(), equalTo("buzz")); + assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens()[1].getStartOffset(), equalTo(4)); + assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens()[1].getEndOffset(), equalTo(8)); + assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens()[1].getPosition(), equalTo(1)); + + assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens()[2].getTerm(), equalTo("test")); + assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens()[2].getStartOffset(), equalTo(9)); + assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens()[2].getEndOffset(), equalTo(13)); + assertThat(analyzeResponse.detail().tokenfilters()[0].getTokens()[2].getPosition(), equalTo(2)); + + // tokenfilter({"type": "stop", "stopwords": ["foo", "buzz"]}) + assertThat(analyzeResponse.detail().tokenfilters()[1].getName(), equalTo("_anonymous_tokenfilter_[1]")); + assertThat(analyzeResponse.detail().tokenfilters()[1].getTokens().length, equalTo(1)); + + assertThat(analyzeResponse.detail().tokenfilters()[1].getTokens()[0].getTerm(), equalTo("test")); + assertThat(analyzeResponse.detail().tokenfilters()[1].getTokens()[0].getStartOffset(), equalTo(9)); + assertThat(analyzeResponse.detail().tokenfilters()[1].getTokens()[0].getEndOffset(), equalTo(13)); + assertThat(analyzeResponse.detail().tokenfilters()[1].getTokens()[0].getPosition(), equalTo(2)); + } + + + public void testCustomCharFilterInRequest() throws Exception { + Map charFilterSettings = new HashMap<>(); + charFilterSettings.put("type", "mapping"); + charFilterSettings.put("mappings", new String[]{"ph => f", "qu => q"}); + AnalyzeResponse analyzeResponse = client().admin().indices() + .prepareAnalyze() + .setText("jeff quit phish") + .setTokenizer("keyword") + .addCharFilter(charFilterSettings) + .setExplain(true) + .get(); + + assertThat(analyzeResponse.detail().analyzer(), IsNull.nullValue()); + //charfilters + assertThat(analyzeResponse.detail().charfilters().length, equalTo(1)); + assertThat(analyzeResponse.detail().charfilters()[0].getName(), equalTo("_anonymous_charfilter_[0]")); + assertThat(analyzeResponse.detail().charfilters()[0].getTexts().length, equalTo(1)); + assertThat(analyzeResponse.detail().charfilters()[0].getTexts()[0], equalTo("jeff qit fish")); + //tokenizer + assertThat(analyzeResponse.detail().tokenizer().getName(), equalTo("keyword")); + assertThat(analyzeResponse.detail().tokenizer().getTokens().length, equalTo(1)); + assertThat(analyzeResponse.detail().tokenizer().getTokens()[0].getTerm(), equalTo("jeff qit fish")); + assertThat(analyzeResponse.detail().tokenizer().getTokens()[0].getStartOffset(), equalTo(0)); + 
assertThat(analyzeResponse.detail().tokenizer().getTokens()[0].getEndOffset(), equalTo(15)); + } + + + public void testCustomTokenizerInRequest() throws Exception { + Map tokenizerSettings = new HashMap<>(); + tokenizerSettings.put("type", "nGram"); + tokenizerSettings.put("min_gram", 2); + tokenizerSettings.put("max_gram", 2); + + AnalyzeResponse analyzeResponse = client().admin().indices() + .prepareAnalyze() + .setText("good") + .setTokenizer(tokenizerSettings) + .setExplain(true) + .get(); + + //tokenizer + assertThat(analyzeResponse.detail().tokenizer().getName(), equalTo("_anonymous_tokenizer")); + assertThat(analyzeResponse.detail().tokenizer().getTokens().length, equalTo(3)); + assertThat(analyzeResponse.detail().tokenizer().getTokens()[0].getTerm(), equalTo("go")); + assertThat(analyzeResponse.detail().tokenizer().getTokens()[0].getStartOffset(), equalTo(0)); + assertThat(analyzeResponse.detail().tokenizer().getTokens()[0].getEndOffset(), equalTo(2)); + assertThat(analyzeResponse.detail().tokenizer().getTokens()[0].getPosition(), equalTo(0)); + + assertThat(analyzeResponse.detail().tokenizer().getTokens()[1].getTerm(), equalTo("oo")); + assertThat(analyzeResponse.detail().tokenizer().getTokens()[1].getStartOffset(), equalTo(1)); + assertThat(analyzeResponse.detail().tokenizer().getTokens()[1].getEndOffset(), equalTo(3)); + assertThat(analyzeResponse.detail().tokenizer().getTokens()[1].getPosition(), equalTo(1)); + + assertThat(analyzeResponse.detail().tokenizer().getTokens()[2].getTerm(), equalTo("od")); + assertThat(analyzeResponse.detail().tokenizer().getTokens()[2].getStartOffset(), equalTo(2)); + assertThat(analyzeResponse.detail().tokenizer().getTokens()[2].getEndOffset(), equalTo(4)); + assertThat(analyzeResponse.detail().tokenizer().getTokens()[2].getPosition(), equalTo(2)); + } + } diff --git a/core/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java b/core/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java index 69bee510710..034cbeb636d 100644 --- a/core/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java +++ b/core/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java @@ -43,6 +43,7 @@ import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.indices.recovery.RecoveryTargetService; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.test.ESTestCase; +import org.junit.Before; import java.io.IOException; import java.util.HashMap; @@ -61,9 +62,15 @@ import static org.hamcrest.Matchers.equalTo; */ public abstract class AbstractIndicesClusterStateServiceTestCase extends ESTestCase { + private boolean enableRandomFailures; + + @Before + public void injectRandomFailures() { + enableRandomFailures = randomBoolean(); + } protected void failRandomly() { - if (rarely()) { + if (enableRandomFailures && rarely()) { throw new RuntimeException("dummy test failure"); } } @@ -73,12 +80,15 @@ public abstract class AbstractIndicesClusterStateServiceTestC * * @param state cluster state used for matching */ - public static void assertClusterStateMatchesNodeState(ClusterState state, IndicesClusterStateService indicesClusterStateService) { + public void assertClusterStateMatchesNodeState(ClusterState state, IndicesClusterStateService indicesClusterStateService) { AllocatedIndices<? extends Shard, ? extends AllocatedIndex<? extends Shard>> indicesService = indicesClusterStateService.indicesService;
ConcurrentMap<ShardId, ShardRouting> failedShardsCache = indicesClusterStateService.failedShardsCache; RoutingNode localRoutingNode = state.getRoutingNodes().node(state.getNodes().getLocalNodeId()); if (localRoutingNode != null) { + if (enableRandomFailures == false) { + assertTrue("failed shard cache should be empty", failedShardsCache.isEmpty()); + } // check that all shards in local routing nodes have been allocated for (ShardRouting shardRouting : localRoutingNode) { Index index = shardRouting.index(); @@ -86,11 +96,17 @@ Shard shard = indicesService.getShardOrNull(shardRouting.shardId()); ShardRouting failedShard = failedShardsCache.get(shardRouting.shardId()); - if (shard == null && failedShard == null) { - fail("Shard with id " + shardRouting + " expected but missing in indicesService and failedShardsCache"); - } - if (failedShard != null && failedShard.isSameAllocation(shardRouting) == false) { - fail("Shard cache has not been properly cleaned for " + failedShard); + if (enableRandomFailures) { + if (shard == null && failedShard == null) { + fail("Shard with id " + shardRouting + " expected but missing in indicesService and failedShardsCache"); + } + if (failedShard != null && failedShard.isSameAllocation(shardRouting) == false) { + fail("Shard cache has not been properly cleaned for " + failedShard); + } + } else { + if (shard == null) { + fail("Shard with id " + shardRouting + " expected but missing in indicesService"); + } } if (shard != null) { @@ -100,9 +116,8 @@ // index metadata has been updated assertThat(indexService.getIndexSettings().getIndexMetaData(), equalTo(indexMetaData)); // shard has been created - if (failedShard == null) { - assertTrue("Shard with id " + shardRouting + " expected but missing in indexService", - shard != null); + if (enableRandomFailures == false || failedShard == null) { + assertTrue("Shard with id " + shardRouting + " expected but missing in indexService", shard != null); // shard has latest shard routing assertThat(shard.routingEntry(), equalTo(shardRouting)); } @@ -118,19 +133,23 @@ for (Shard shard : indexService) { shardsFound = true; ShardRouting persistedShardRouting = shard.routingEntry(); - boolean found = false; - for (ShardRouting shardRouting : localRoutingNode) { - if (persistedShardRouting.equals(shardRouting)) { - found = true; - } + ShardRouting shardRouting = localRoutingNode.getByShardId(persistedShardRouting.shardId()); + if (shardRouting == null) { + fail("Shard with id " + persistedShardRouting + " locally exists but missing in routing table"); + } + if (shardRouting.equals(persistedShardRouting) == false) { + fail("Local shard " + persistedShardRouting + " has stale routing " + shardRouting); } - assertTrue(found); } if (shardsFound == false) { - // check if we have shards of that index in failedShardsCache - // if yes, we might not have cleaned the index as failedShardsCache can be populated by another thread - assertFalse(failedShardsCache.keySet().stream().noneMatch(shardId -> shardId.getIndex().equals(indexService.index()))); + if (enableRandomFailures) { + // check if we have shards of that index in failedShardsCache + // if yes, we might not have cleaned the index as failedShardsCache can be populated by another thread + assertFalse(failedShardsCache.keySet().stream().noneMatch(shardId ->
shardId.getIndex().equals(indexService.index()))); + } else { + fail("index service for index " + indexService.index() + " has no shards"); + } } } @@ -181,7 +200,8 @@ } @Override - public @Nullable MockIndexService indexService(Index index) { + @Nullable + public MockIndexService indexService(Index index) { return indices.get(index.getUUID()); } diff --git a/core/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java b/core/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java index 09441f70110..1a8caaa3514 100644 --- a/core/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java +++ b/core/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java @@ -158,7 +158,7 @@ public class ClusterStateChanges { allocationService, IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, indicesService, nodeServicesProvider); MetaDataCreateIndexService createIndexService = new MetaDataCreateIndexService(settings, clusterService, indicesService, allocationService, new AliasValidator(settings), Collections.emptySet(), environment, - nodeServicesProvider, IndexScopedSettings.DEFAULT_SCOPED_SETTINGS); + nodeServicesProvider, IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, threadPool); transportCloseIndexAction = new TransportCloseIndexAction(settings, transportService, clusterService, threadPool, indexStateService, clusterSettings, actionFilters, indexNameExpressionResolver, destructiveOperations); diff --git a/core/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java b/core/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java index 78ef13dde56..c2ccb9cd4ab 100644 --- a/core/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java +++ b/core/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest; +import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.replication.ClusterStateCreationUtils; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; @@ -37,7 +38,7 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.FailedRerouteAllocation.FailedShard; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.indices.recovery.RecoveryTargetService; import org.elasticsearch.repositories.RepositoriesService; @@ -123,7 +124,7 @@ public class IndicesClusterStateServiceRandomUpdatesTests extends AbstractIndice for (Iterator<Map.Entry<DiscoveryNode, IndicesClusterStateService>> it = clusterStateServiceMap.entrySet().iterator(); it.hasNext(); ) { DiscoveryNode node = it.next().getKey(); - if (state.nodes().nodeExists(node.getId()) == false) { + if (state.nodes().nodeExists(node) == false) { it.remove(); } } @@ -140,7 +141,7 @@ public class
IndicesClusterStateServiceRandomUpdatesTests extends AbstractIndice CreateIndexRequest request = new CreateIndexRequest(name, Settings.builder() .put(SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 3)) .put(SETTING_NUMBER_OF_REPLICAS, randomInt(2)) - .build()); + .build()).waitForActiveShards(ActiveShardCount.NONE); state = cluster.createIndex(state, request); assertTrue(state.metaData().hasIndex(name)); } @@ -254,7 +255,7 @@ public class IndicesClusterStateServiceRandomUpdatesTests extends AbstractIndice for (DiscoveryNode.Role mustHaveRole : mustHaveRoles) { roles.add(mustHaveRole); } - return new DiscoveryNode("node_" + randomAsciiOfLength(8), DummyTransportAddress.INSTANCE, Collections.emptyMap(), roles, + return new DiscoveryNode("node_" + randomAsciiOfLength(8), LocalTransportAddress.buildUnique(), Collections.emptyMap(), roles, Version.CURRENT); } @@ -270,7 +271,7 @@ public class IndicesClusterStateServiceRandomUpdatesTests extends AbstractIndice final TransportService transportService = new TransportService(Settings.EMPTY, null, threadPool); final ClusterService clusterService = mock(ClusterService.class); final RepositoriesService repositoriesService = new RepositoriesService(Settings.EMPTY, clusterService, - transportService, null, null); + transportService, null); final RecoveryTargetService recoveryTargetService = new RecoveryTargetService(Settings.EMPTY, threadPool, transportService, null, clusterService); final ShardStateAction shardStateAction = mock(ShardStateAction.class); diff --git a/core/src/test/java/org/elasticsearch/indices/exists/indices/IndicesExistsIT.java b/core/src/test/java/org/elasticsearch/indices/exists/indices/IndicesExistsIT.java index 38bea16f83c..6052add7e36 100644 --- a/core/src/test/java/org/elasticsearch/indices/exists/indices/IndicesExistsIT.java +++ b/core/src/test/java/org/elasticsearch/indices/exists/indices/IndicesExistsIT.java @@ -41,7 +41,6 @@ public class IndicesExistsIT extends ESIntegTestCase { assertThat(client().admin().indices().prepareExists("_all").get().isExists(), equalTo(false)); createIndex("foo", "foobar", "bar", "barbaz"); - ensureYellow(); assertThat(client().admin().indices().prepareExists("foo*").get().isExists(), equalTo(true)); assertThat(client().admin().indices().prepareExists("foobar").get().isExists(), equalTo(true)); @@ -52,7 +51,6 @@ public class IndicesExistsIT extends ESIntegTestCase { public void testIndicesExistsWithBlocks() { createIndex("ro"); - ensureYellow(); // Request is not blocked for (String blockSetting : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY)) { diff --git a/core/src/test/java/org/elasticsearch/indices/exists/types/TypesExistsIT.java b/core/src/test/java/org/elasticsearch/indices/exists/types/TypesExistsIT.java index 407ee6fbc43..ffe44b3e1c1 100644 --- a/core/src/test/java/org/elasticsearch/indices/exists/types/TypesExistsIT.java +++ b/core/src/test/java/org/elasticsearch/indices/exists/types/TypesExistsIT.java @@ -18,7 +18,7 @@ */ package org.elasticsearch.indices.exists.types; -import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.exists.types.TypesExistsResponse; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -39,17 +39,16 @@ import static org.hamcrest.Matchers.equalTo; public class TypesExistsIT extends ESIntegTestCase { public void testSimple() throws Exception { Client 
client = client(); - client.admin().indices().prepareCreate("test1") + CreateIndexResponse response1 = client.admin().indices().prepareCreate("test1") .addMapping("type1", jsonBuilder().startObject().startObject("type1").endObject().endObject()) .addMapping("type2", jsonBuilder().startObject().startObject("type2").endObject().endObject()) .execute().actionGet(); - client.admin().indices().prepareCreate("test2") + CreateIndexResponse response2 = client.admin().indices().prepareCreate("test2") .addMapping("type1", jsonBuilder().startObject().startObject("type1").endObject().endObject()) .execute().actionGet(); client.admin().indices().prepareAliases().addAlias("test1", "alias1").execute().actionGet(); - ClusterHealthResponse healthResponse = client.admin().cluster() - .prepareHealth("test1", "test2").setWaitForYellowStatus().execute().actionGet(); - assertThat(healthResponse.isTimedOut(), equalTo(false)); + assertAcked(response1); + assertAcked(response2); TypesExistsResponse response = client.admin().indices().prepareTypesExists("test1").setTypes("type1").execute().actionGet(); assertThat(response.isExists(), equalTo(true)); diff --git a/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java b/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java index 8c724efdfc7..7d950a73837 100644 --- a/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java +++ b/core/src/test/java/org/elasticsearch/indices/flush/FlushIT.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.admin.indices.flush.FlushResponse; import org.elasticsearch.action.admin.indices.flush.SyncedFlushResponse; import org.elasticsearch.action.admin.indices.stats.IndexStats; import org.elasticsearch.action.admin.indices.stats.ShardStats; +import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.ShardRouting; @@ -68,14 +69,14 @@ public class FlushIT extends ESIntegTestCase { // don't use assertAllSuccessful it uses a randomized context that belongs to a different thread assertThat("Unexpected ShardFailures: " + Arrays.toString(flushResponse.getShardFailures()), flushResponse.getFailedShards(), equalTo(0)); latch.countDown(); - } catch (Throwable ex) { + } catch (Exception ex) { onFailure(ex); } } @Override - public void onFailure(Throwable e) { + public void onFailure(Exception e) { errors.add(e); latch.countDown(); } @@ -213,7 +214,7 @@ public class FlushIT extends ESIntegTestCase { public void testUnallocatedShardsDoesNotHang() throws InterruptedException { // create an index but disallow allocation - prepareCreate("test").setSettings(Settings.builder().put("index.routing.allocation.include._name", "nonexistent")).get(); + prepareCreate("test").setWaitForActiveShards(ActiveShardCount.NONE).setSettings(Settings.builder().put("index.routing.allocation.include._name", "nonexistent")).get(); // this should not hang but instead immediately return with empty result set List shardsResult = client().admin().indices().prepareSyncedFlush("test").get().getShardsResultPerIndex().get("test"); diff --git a/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java b/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java index 5932434438c..ea2a80bada5 100644 --- a/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java +++ 
b/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.indices.flush; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; @@ -31,9 +32,11 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardNotFoundException; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.threadpool.ThreadPool; import java.util.List; import java.util.Map; +import java.util.concurrent.ExecutionException; /** */ @@ -103,7 +106,7 @@ public class SyncedFlushSingleNodeTests extends ESSingleNodeTestCase { assertTrue(response.success()); } - public void testSyncFailsIfOperationIsInFlight() throws InterruptedException { + public void testSyncFailsIfOperationIsInFlight() throws InterruptedException, ExecutionException { createIndex("test"); client().prepareIndex("test", "test", "1").setSource("{}").get(); IndexService test = getInstanceFromNode(IndicesService.class).indexService(resolveIndex("test")); @@ -111,7 +114,9 @@ public class SyncedFlushSingleNodeTests extends ESSingleNodeTestCase { SyncedFlushService flushService = getInstanceFromNode(SyncedFlushService.class); final ShardId shardId = shard.shardId(); - try (Releasable operationLock = shard.acquirePrimaryOperationLock()) { + PlainActionFuture<Releasable> fut = new PlainActionFuture<>(); + shard.acquirePrimaryOperationLock(fut, ThreadPool.Names.INDEX); + try (Releasable operationLock = fut.get()) { SyncedFlushUtil.LatchedListener<ShardsSyncedFlushResult> listener = new SyncedFlushUtil.LatchedListener<>(); flushService.attemptSyncedFlush(shardId, listener); listener.latch.await(); diff --git a/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushUtil.java b/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushUtil.java index 485ec020c3f..b71ba63a157 100644 --- a/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushUtil.java +++ b/core/src/test/java/org/elasticsearch/indices/flush/SyncedFlushUtil.java @@ -57,7 +57,7 @@ public class SyncedFlushUtil { public static final class LatchedListener<T> implements ActionListener<T> { public volatile T result; - public volatile Throwable error; + public volatile Exception error; public final CountDownLatch latch = new CountDownLatch(1); @Override @@ -67,7 +67,7 @@ public class SyncedFlushUtil { } @Override - public void onFailure(Throwable e) { + public void onFailure(Exception e) { error = e; latch.countDown(); } diff --git a/core/src/test/java/org/elasticsearch/indices/mapping/ConcurrentDynamicTemplateIT.java b/core/src/test/java/org/elasticsearch/indices/mapping/ConcurrentDynamicTemplateIT.java index 2981f2d110c..406541c0f49 100644 --- a/core/src/test/java/org/elasticsearch/indices/mapping/ConcurrentDynamicTemplateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/mapping/ConcurrentDynamicTemplateIT.java @@ -54,7 +54,6 @@ public class ConcurrentDynamicTemplateIT extends ESIntegTestCase { cluster().wipeIndices("test"); assertAcked(prepareCreate("test") .addMapping(mappingType, mapping)); - ensureYellow(); int numDocs = scaledRandomIntBetween(10, 100); final CountDownLatch latch = new CountDownLatch(numDocs); final List<Throwable> throwable = new CopyOnWriteArrayList<>(); @@ -69,7 +68,7 @@ } @Override - public void
onFailure(Throwable e) { + public void onFailure(Exception e) { throwable.add(e); latch.countDown(); } @@ -83,4 +82,4 @@ public class ConcurrentDynamicTemplateIT extends ESIntegTestCase { } } -} \ No newline at end of file +} diff --git a/core/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java b/core/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java index 9552e638ba7..ecb4436918a 100644 --- a/core/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java +++ b/core/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java @@ -46,10 +46,9 @@ import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; public class SimpleGetFieldMappingsIT extends ESIntegTestCase { - + public void testGetMappingsWhereThereAreNone() { createIndex("index"); - ensureYellow(); GetFieldMappingsResponse response = client().admin().indices().prepareGetFieldMappings().get(); assertThat(response.mappings().size(), equalTo(1)); assertThat(response.mappings().get("index").size(), equalTo(0)); @@ -63,7 +62,7 @@ public class SimpleGetFieldMappingsIT extends ESIntegTestCase { .startObject("obj").startObject("properties").startObject("subfield").field("type", "keyword").endObject().endObject().endObject() .endObject().endObject().endObject(); } - + public void testSimpleGetFieldMappings() throws Exception { assertAcked(prepareCreate("indexa") @@ -73,7 +72,6 @@ public class SimpleGetFieldMappingsIT extends ESIntegTestCase { .addMapping("typeA", getMappingForType("typeA")) .addMapping("typeB", getMappingForType("typeB"))); - ensureYellow(); // Get mappings by full name GetFieldMappingsResponse response = client().admin().indices().prepareGetFieldMappings("indexa").setTypes("typeA").setFields("field1", "obj.subfield").get(); @@ -139,7 +137,6 @@ public class SimpleGetFieldMappingsIT extends ESIntegTestCase { assertAcked(prepareCreate("test").addMapping("type", getMappingForType("type"))); client().prepareIndex("test", "type", "1").setSource("num", 1).get(); - ensureYellow(); GetFieldMappingsResponse response = client().admin().indices().prepareGetFieldMappings().setFields("num", "field1", "obj.subfield").includeDefaults(true).get(); @@ -157,7 +154,6 @@ public class SimpleGetFieldMappingsIT extends ESIntegTestCase { assertAcked(prepareCreate("index").addMapping("type", getMappingForType("type"))); Map params = new HashMap<>(); params.put("pretty", "true"); - ensureYellow(); GetFieldMappingsResponse response = client().admin().indices().prepareGetFieldMappings("index").setTypes("type").setFields("field1", "obj.subfield").get(); XContentBuilder responseBuilder = XContentFactory.jsonBuilder().prettyPrint(); responseBuilder.startObject(); @@ -189,7 +185,6 @@ public class SimpleGetFieldMappingsIT extends ESIntegTestCase { assertAcked(prepareCreate("test") .addMapping("typeA", getMappingForType("typeA")) .addMapping("typeB", getMappingForType("typeB"))); - ensureYellow(); for (String block : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY)) { try { diff --git a/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java index 68a176e22c3..ee05e0ae526 100644 --- a/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java @@ -43,6 +43,7 @@ import java.util.List; 
import java.util.Map; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_METADATA; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_BLOCKS_READ; @@ -254,11 +255,7 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase { public void testUpdateMappingConcurrently() throws Throwable { createIndex("test1", "test2"); - // This is important. The test assumes all nodes are aware of all indices. Due to initializing shard throttling - // not all shards are allocated with the initial create index. Wait for it.. - ensureYellow(); - - final Throwable[] threadException = new Throwable[1]; + final AtomicReference threadException = new AtomicReference<>(); final AtomicBoolean stop = new AtomicBoolean(false); Thread[] threads = new Thread[3]; final CyclicBarrier barrier = new CyclicBarrier(threads.length); @@ -298,8 +295,8 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase { assertThat(mappings.containsKey(typeName), equalTo(true)); assertThat(((Map) mappings.get(typeName).getSourceAsMap().get("properties")).keySet(), Matchers.hasItem(fieldName)); } - } catch (Throwable t) { - threadException[0] = t; + } catch (Exception e) { + threadException.set(e); stop.set(true); } } @@ -311,8 +308,8 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase { for (Thread t : threads) t.join(); - if (threadException[0] != null) { - throw threadException[0]; + if (threadException.get() != null) { + throw threadException.get(); } } diff --git a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java index 0ae8f71c742..b448f35c21b 100644 --- a/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java +++ b/core/src/test/java/org/elasticsearch/indices/memory/breaker/CircuitBreakerServiceIT.java @@ -28,6 +28,8 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; @@ -62,7 +64,6 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcke import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.instanceOf; -import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.startsWith; @@ -70,7 +71,7 @@ import static org.hamcrest.Matchers.startsWith; /** * Integration tests for InternalCircuitBreakerService */ -@ClusterScope(scope = TEST, randomDynamicTemplates = false) +@ClusterScope(scope = TEST, randomDynamicTemplates = false, numClientNodes = 0, maxNumDataNodes = 1) public class CircuitBreakerServiceIT extends ESIntegTestCase { /** Reset all breaker settings back to their defaults */ private void reset() { @@ -266,17 
+267,26 @@ public class CircuitBreakerServiceIT extends ESIntegTestCase { // Perform a search to load field data for the "test" field try { - client.prepareSearch("cb-test").setQuery(matchAllQuery()).addSort("test", SortOrder.DESC).get(); - fail("should have thrown an exception"); + SearchResponse searchResponse = client.prepareSearch("cb-test").setQuery(matchAllQuery()).addSort("test", SortOrder.DESC).get(); + if (searchResponse.getShardFailures().length > 0) { + // each shard must have failed with CircuitBreakingException + for (ShardSearchFailure shardSearchFailure : searchResponse.getShardFailures()) { + Throwable cause = ExceptionsHelper.unwrap(shardSearchFailure.getCause(), CircuitBreakingException.class); + assertThat(cause, instanceOf(CircuitBreakingException.class)); + assertEquals(((CircuitBreakingException) cause).getByteLimit(), 500L); + } + } else { + fail("should have thrown a CircuitBreakingException"); + } } catch (Exception e) { - final Throwable cause = ExceptionsHelper.unwrap(e, CircuitBreakingException.class); - assertNotNull("CircuitBreakingException is not the cause of " + e, cause); - String errMsg = "would be larger than limit of [500/500b]]"; - assertThat("Exception: [" + cause.toString() + "] should contain a CircuitBreakingException", + Throwable cause = ExceptionsHelper.unwrap(e, CircuitBreakingException.class); + assertThat(cause, instanceOf(CircuitBreakingException.class)); + assertEquals(((CircuitBreakingException) cause).getByteLimit(), 500L); + assertThat("Exception: [" + cause.toString() + "] should be caused by the parent circuit breaker", cause.toString(), startsWith("CircuitBreakingException[[parent] Data too large")); - assertThat("Exception: [" + cause.toString() + "] should contain a CircuitBreakingException", - cause.toString(), endsWith(errMsg)); } + + reset(); } public void testRequestBreaker() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java b/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java index 4596479d2c8..369692de2a7 100644 --- a/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java +++ b/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java @@ -22,18 +22,16 @@ package org.elasticsearch.indices.memory.breaker; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.LeafReader; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequestBuilder; -import org.elasticsearch.client.Requests; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.MockEngineFactoryPlugin; @@ -114,18 +112,20 @@ 
diff --git a/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java b/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java
index 4596479d2c8..369692de2a7 100644
--- a/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java
@@ -22,18 +22,16 @@ package org.elasticsearch.indices.memory.breaker;

 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.LeafReader;
 import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
 import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
 import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
+import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
 import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
 import org.elasticsearch.action.search.SearchPhaseExecutionException;
 import org.elasticsearch.action.search.SearchRequestBuilder;
-import org.elasticsearch.client.Requests;
 import org.elasticsearch.common.breaker.CircuitBreaker;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Setting.Property;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.settings.SettingsModule;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.MockEngineFactoryPlugin;
@@ -114,18 +112,20 @@ public class RandomExceptionCircuitBreakerIT extends ESIntegTestCase {
                 .put(EXCEPTION_LOW_LEVEL_RATIO_KEY, lowLevelRate)
                 .put(MockEngineSupport.WRAP_READER_RATIO.getKey(), 1.0d);
         logger.info("creating index: [test] using settings: [{}]", settings.build().getAsMap());
-        client().admin().indices().prepareCreate("test")
+        CreateIndexResponse response = client().admin().indices().prepareCreate("test")
                 .setSettings(settings)
                 .addMapping("type", mapping).execute().actionGet();
-        ClusterHealthResponse clusterHealthResponse = client().admin().cluster()
-                .health(Requests.clusterHealthRequest().waitForYellowStatus().timeout(TimeValue.timeValueSeconds(5))).get(); // it's OK to timeout here
         final int numDocs;
-        if (clusterHealthResponse.isTimedOut()) {
+        if (response.isShardsAcked() == false) {
             /* some seeds just won't let you create the index at all and we enter a ping-pong mode
              * trying one node after another etc. that is ok but we need to make sure we don't wait
              * forever when indexing documents so we set numDocs = 1 and expect all shards to fail
              * when we search below.*/
-            logger.info("ClusterHealth timed out - only index one doc and expect searches to fail");
+            if (response.isAcknowledged()) {
+                logger.info("Index creation timed out waiting for primaries to start - only index one doc and expect searches to fail");
+            } else {
+                logger.info("Index creation failed - only index one doc and expect searches to fail");
+            }
             numDocs = 1;
         } else {
             numDocs = between(10, 100);
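The new `isShardsAcked()` branch distinguishes three outcomes of index creation: not acknowledged at all, acknowledged but primaries not started within the timeout, and fully started; only the last case indexes a realistic number of documents. A compact sketch of that decision, with plain booleans standing in for the `CreateIndexResponse` getters:

    import java.util.Random;

    public class NumDocsChoice {
        /** acknowledged/shardsAcked are illustrative stand-ins for the response getters. */
        static int chooseNumDocs(boolean acknowledged, boolean shardsAcked, Random random) {
            if (shardsAcked == false) {
                // creation failed outright (acknowledged == false), or timed out waiting for
                // primaries: index a single doc and expect every search below to fail
                return 1;
            }
            return 10 + random.nextInt(91); // the test's between(10, 100)
        }
    }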
diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java b/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java
index 0d6d5122006..cfff28121ba 100644
--- a/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java
+++ b/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java
@@ -24,6 +24,7 @@ import org.apache.lucene.document.StringField;
 import org.apache.lucene.document.TextField;
 import org.apache.lucene.index.CorruptIndexException;
 import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexCommit;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.store.BaseDirectoryWrapper;
@@ -35,15 +36,20 @@ import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.common.io.FileSystemUtils;
+import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.common.lucene.store.IndexOutputOutputStream;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.transport.DummyTransportAddress;
+import org.elasticsearch.common.transport.LocalTransportAddress;
 import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.shard.IndexShard;
+import org.elasticsearch.index.shard.IndexShardRelocatedException;
+import org.elasticsearch.index.shard.IndexShardState;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.index.store.DirectoryService;
 import org.elasticsearch.index.store.Store;
 import org.elasticsearch.index.store.StoreFileMetaData;
+import org.elasticsearch.index.translog.Translog;
 import org.elasticsearch.test.CorruptionUtils;
 import org.elasticsearch.test.DummyShardLock;
 import org.elasticsearch.test.ESTestCase;
@@ -55,9 +61,15 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.function.Function;
+import java.util.function.Supplier;

 import static java.util.Collections.emptyMap;
 import static java.util.Collections.emptySet;
+import static org.mockito.Matchers.any;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;

 public class RecoverySourceHandlerTests extends ESTestCase {
     private static final IndexSettings INDEX_SETTINGS = IndexSettingsModule.newIndexSettings("index", Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT).build());
@@ -69,12 +81,12 @@ public class RecoverySourceHandlerTests extends ESTestCase {
             put("indices.recovery.concurrent_small_file_streams", 1).build();
         final RecoverySettings recoverySettings = new RecoverySettings(settings, service);
         StartRecoveryRequest request = new StartRecoveryRequest(shardId,
-            new DiscoveryNode("b", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT),
-            new DiscoveryNode("b", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT),
+            new DiscoveryNode("b", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT),
+            new DiscoveryNode("b", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT),
             null, RecoveryState.Type.STORE, randomLong());
         Store store = newStore(createTempDir());
-        RecoverySourceHandler handler = new RecoverySourceHandler(null, null, request, recoverySettings.getChunkSize().bytesAsInt(),
-            logger);
+        RecoverySourceHandler handler = new RecoverySourceHandler(null, null, request, () -> 0L, e -> () -> {},
+            recoverySettings.getChunkSize().bytesAsInt(), logger);
         Directory dir = store.directory();
         RandomIndexWriter writer = new RandomIndexWriter(random(), dir, newIndexWriterConfig());
         int numDocs = randomIntBetween(10, 100);
@@ -119,13 +131,14 @@ public class RecoverySourceHandlerTests extends ESTestCase {
             put("indices.recovery.concurrent_small_file_streams", 1).build();
         final RecoverySettings recoverySettings = new RecoverySettings(settings, service);
         StartRecoveryRequest request = new StartRecoveryRequest(shardId,
-            new DiscoveryNode("b", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT),
-            new DiscoveryNode("b", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT),
+            new DiscoveryNode("b", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT),
+            new DiscoveryNode("b", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT),
             null, RecoveryState.Type.STORE, randomLong());
         Path tempDir = createTempDir();
         Store store = newStore(tempDir, false);
         AtomicBoolean failedEngine = new AtomicBoolean(false);
-        RecoverySourceHandler handler = new RecoverySourceHandler(null, null, request, recoverySettings.getChunkSize().bytesAsInt(), logger) {
+        RecoverySourceHandler handler = new RecoverySourceHandler(null, null, request, () -> 0L, e -> () -> {},
+            recoverySettings.getChunkSize().bytesAsInt(), logger) {
             @Override
             protected void failEngine(IOException cause) {
                 assertFalse(failedEngine.get());
@@ -182,13 +195,14 @@ public class RecoverySourceHandlerTests extends ESTestCase {
             put("indices.recovery.concurrent_small_file_streams", 1).build();
         final RecoverySettings recoverySettings = new RecoverySettings(settings, service);
         StartRecoveryRequest request = new StartRecoveryRequest(shardId,
-            new DiscoveryNode("b", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT),
-            new DiscoveryNode("b", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT),
+            new DiscoveryNode("b", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT),
+            new DiscoveryNode("b", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT),
             null, RecoveryState.Type.STORE, randomLong());
         Path tempDir = createTempDir();
         Store store = newStore(tempDir, false);
         AtomicBoolean failedEngine = new AtomicBoolean(false);
-        RecoverySourceHandler handler = new RecoverySourceHandler(null, null, request, recoverySettings.getChunkSize().bytesAsInt(), logger) {
+        RecoverySourceHandler handler = new RecoverySourceHandler(null, null, request, () -> 0L, e -> () -> {},
+            recoverySettings.getChunkSize().bytesAsInt(), logger) {
             @Override
             protected void failEngine(IOException cause) {
                 assertFalse(failedEngine.get());
@@ -237,6 +251,99 @@ public class RecoverySourceHandlerTests extends ESTestCase {
         IOUtils.close(store, targetStore);
     }

+    public void testThrowExceptionOnPrimaryRelocatedBeforePhase1Completed() throws IOException {
+        final RecoverySettings recoverySettings = new RecoverySettings(Settings.EMPTY, service);
+        StartRecoveryRequest request = new StartRecoveryRequest(shardId,
+            new DiscoveryNode("b", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT),
+            new DiscoveryNode("b", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT),
+            null, RecoveryState.Type.REPLICA, randomLong());
+        IndexShard shard = mock(IndexShard.class);
+        Translog.View translogView = mock(Translog.View.class);
+        when(shard.acquireTranslogView()).thenReturn(translogView);
+        when(shard.state()).thenReturn(IndexShardState.RELOCATED);
+        AtomicBoolean phase1Called = new AtomicBoolean();
+        AtomicBoolean phase2Called = new AtomicBoolean();
+        RecoverySourceHandler handler = new RecoverySourceHandler(shard, null, request, () -> 0L, e -> () -> {},
+            recoverySettings.getChunkSize().bytesAsInt(), logger) {
+
+            @Override
+            public void phase1(final IndexCommit snapshot, final Translog.View translogView) {
+                phase1Called.set(true);
+            }
+
+            @Override
+            public void phase2(Translog.Snapshot snapshot) {
+                phase2Called.set(true);
+            }
+        };
+        expectThrows(IndexShardRelocatedException.class, () -> handler.recoverToTarget());
+        assertTrue(phase1Called.get());
+        assertFalse(phase2Called.get());
+    }
+
+    public void testWaitForClusterStateOnPrimaryRelocation() throws IOException, InterruptedException {
+        final RecoverySettings recoverySettings = new RecoverySettings(Settings.EMPTY, service);
+        StartRecoveryRequest request = new StartRecoveryRequest(shardId,
+            new DiscoveryNode("b", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT),
+            new DiscoveryNode("b", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT),
+            null, RecoveryState.Type.PRIMARY_RELOCATION, randomLong());
+        AtomicBoolean phase1Called = new AtomicBoolean();
+        AtomicBoolean phase2Called = new AtomicBoolean();
+        AtomicBoolean ensureClusterStateVersionCalled = new AtomicBoolean();
+        AtomicBoolean recoveriesDelayed = new AtomicBoolean();
+        AtomicBoolean relocated = new AtomicBoolean();
+
+        IndexShard shard = mock(IndexShard.class);
+        Translog.View translogView = mock(Translog.View.class);
+        when(shard.acquireTranslogView()).thenReturn(translogView);
+        when(shard.state()).then(i -> relocated.get() ? IndexShardState.RELOCATED : IndexShardState.STARTED);
+        doAnswer(i -> {
+            relocated.set(true);
+            assertTrue(recoveriesDelayed.get());
+            return null;
+        }).when(shard).relocated(any(String.class));
+
+        RecoveryTargetHandler targetHandler = mock(RecoveryTargetHandler.class);
+
+        final Supplier<Long> currentClusterStateVersionSupplier = () -> {
+            assertFalse(ensureClusterStateVersionCalled.get());
+            assertTrue(recoveriesDelayed.get());
+            ensureClusterStateVersionCalled.set(true);
+            return 0L;
+        };
+        final Function<String, Releasable> delayNewRecoveries = s -> {
+            assertTrue(phase1Called.get());
+            assertTrue(phase2Called.get());
+
+            assertFalse(recoveriesDelayed.get());
+            recoveriesDelayed.set(true);
+            return () -> {
+                assertTrue(recoveriesDelayed.get());
+                recoveriesDelayed.set(false);
+            };
+        };
+
+        RecoverySourceHandler handler = new RecoverySourceHandler(shard, targetHandler, request, currentClusterStateVersionSupplier,
+            delayNewRecoveries, recoverySettings.getChunkSize().bytesAsInt(), logger) {
+
+            @Override
+            public void phase1(final IndexCommit snapshot, final Translog.View translogView) {
+                phase1Called.set(true);
+            }
+
+            @Override
+            public void phase2(Translog.Snapshot snapshot) {
+                phase2Called.set(true);
+            }
+        };
+        handler.recoverToTarget();
+        assertTrue(ensureClusterStateVersionCalled.get());
+        assertTrue(phase1Called.get());
+        assertTrue(phase2Called.get());
+        assertTrue(relocated.get());
+        assertFalse(recoveriesDelayed.get());
+    }
+
     private Store newStore(Path path) throws IOException {
         return newStore(path, true);
     }
diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryStatusTests.java b/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryStatusTests.java
index be94f236bc9..587dc35bc50 100644
--- a/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryStatusTests.java
+++ b/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryStatusTests.java
@@ -36,8 +36,6 @@ import java.util.regex.Pattern;

 import static java.util.Collections.emptyMap;
 import static java.util.Collections.emptySet;

-/**
- */
 public class RecoveryStatusTests extends ESSingleNodeTestCase {

     public void testRenameTempFiles() throws IOException {
@@ -53,7 +51,7 @@ public class RecoveryStatusTests extends ESSingleNodeTestCase {
             @Override
             public void onRecoveryFailure(RecoveryState state, RecoveryFailedException e, boolean sendShardFailure) {
             }
-        });
+        }, version -> {});
         try (IndexOutput indexOutput = status.openAndPutIndexOutput("foo.bar",
                 new StoreFileMetaData("foo.bar", 8 + CodecUtil.footerLength(), "9z51nw"), status.store())) {
             indexOutput.writeInt(1);
             IndexOutput openIndexOutput = status.getOpenIndexOutput("foo.bar");
@@ -73,7 +71,7 @@ public class RecoveryStatusTests extends ESSingleNodeTestCase {
         Set<String> strings = Sets.newHashSet(status.store().directory().listAll());
         String expectedFile = null;
         for (String file : strings) {
-            if (Pattern.compile("recovery[.]\\d+[.]foo[.]bar").matcher(file).matches()) {
+            if (Pattern.compile("recovery[.][\\w-]+[.]foo[.]bar").matcher(file).matches()) {
                 expectedFile = file;
                 break;
             }
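The regex change in RecoveryStatusTests widens the temp-file check from a purely numeric recovery id (`\d+`) to word characters and dashes (`[\w-]+`), so UUID-style identifiers also match. A quick JDK demonstration of the difference (file names invented for the example):

    import java.util.regex.Pattern;

    public class RecoveryFilePatternDemo {
        public static void main(String[] args) {
            Pattern numericOnly = Pattern.compile("recovery[.]\\d+[.]foo[.]bar");
            Pattern widened = Pattern.compile("recovery[.][\\w-]+[.]foo[.]bar");
            String numericId = "recovery.42.foo.bar";
            String uuidStyleId = "recovery.dsdwsh77-x.foo.bar";
            System.out.println(numericOnly.matcher(numericId).matches());   // true
            System.out.println(numericOnly.matcher(uuidStyleId).matches()); // false
            System.out.println(widened.matcher(numericId).matches());       // true
            System.out.println(widened.matcher(uuidStyleId).matches());     // true
        }
    }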
diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryTargetTests.java b/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryTargetTests.java
index 8552db2d376..d0401196b95 100644
--- a/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryTargetTests.java
+++ b/core/src/test/java/org/elasticsearch/indices/recovery/RecoveryTargetTests.java
@@ -24,7 +24,7 @@ import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.Streamable;
-import org.elasticsearch.common.transport.DummyTransportAddress;
+import org.elasticsearch.common.transport.LocalTransportAddress;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.indices.recovery.RecoveryState.File;
 import org.elasticsearch.indices.recovery.RecoveryState.Index;
@@ -57,9 +57,9 @@ import static org.hamcrest.Matchers.lessThanOrEqualTo;
 public class RecoveryTargetTests extends ESTestCase {
     abstract class Streamer<T extends Streamable> extends Thread {
         private T lastRead;
-        final private AtomicBoolean shouldStop;
-        final private T source;
-        final AtomicReference<Throwable> error = new AtomicReference<>();
+        private final AtomicBoolean shouldStop;
+        private final T source;
+        final AtomicReference<Exception> error = new AtomicReference<>();
         final Version streamVersion;

         Streamer(AtomicBoolean shouldStop, T source) {
@@ -73,7 +73,7 @@ public class RecoveryTargetTests extends ESTestCase {
         }

         public T lastRead() throws Throwable {
-            Throwable t = error.get();
+            Exception t = error.get();
             if (t != null) {
                 throw t;
             }
@@ -84,7 +84,7 @@ public class RecoveryTargetTests extends ESTestCase {
             BytesStreamOutput out = new BytesStreamOutput();
             source.writeTo(out);
             out.close();
-            StreamInput in = StreamInput.wrap(out.bytes());
+            StreamInput in = out.bytes().streamInput();
             T obj = deserialize(in);
             lastRead = obj;
             return obj;
@@ -105,8 +105,8 @@ public class RecoveryTargetTests extends ESTestCase {
                     serializeDeserialize();
                 }
                 serializeDeserialize();
-            } catch (Throwable t) {
-                error.set(t);
+            } catch (Exception e) {
+                error.set(e);
             }
         }
     }
@@ -339,7 +339,8 @@ public class RecoveryTargetTests extends ESTestCase {
     }

     public void testStageSequenceEnforcement() {
-        final DiscoveryNode discoveryNode = new DiscoveryNode("1", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT);
+        final DiscoveryNode discoveryNode = new DiscoveryNode("1", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(),
+            Version.CURRENT);
         Stage[] stages = Stage.values();
         int i = randomIntBetween(0, stages.length - 1);
         int j;
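RecoveryTargetTests keeps the same write-then-read round trip, only switching from `StreamInput.wrap(out.bytes())` to `out.bytes().streamInput()`. For readers unfamiliar with the pattern, here is a JDK-only analogue; the Elasticsearch stream classes are richer, and this sketch only shows the mirrored write/read discipline the Streamer relies on:

    import java.io.*;

    public class RoundTripDemo {
        public static void main(String[] args) throws IOException {
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            try (DataOutputStream out = new DataOutputStream(bytes)) {
                out.writeInt(42);          // the "writeTo" side
                out.writeUTF("recovery");
            }
            try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()))) {
                int i = in.readInt();      // the "readFrom" side must mirror the write order exactly
                String s = in.readUTF();
                if (i != 42 || !"recovery".equals(s)) {
                    throw new AssertionError("round trip mismatch");
                }
            }
        }
    }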
diff --git a/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java b/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java
index 81c50cc4f9c..2ad8ebb52f9 100644
--- a/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/state/RareClusterStateIT.java
@@ -42,7 +42,7 @@ import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.transport.DummyTransportAddress;
+import org.elasticsearch.common.transport.LocalTransportAddress;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.discovery.DiscoveryModule;
 import org.elasticsearch.discovery.DiscoverySettings;
@@ -126,7 +126,7 @@ public class RareClusterStateIT extends ESIntegTestCase {
                 // inject a node
                 ClusterState.Builder builder = ClusterState.builder(currentState);
                 builder.nodes(DiscoveryNodes.builder(currentState.nodes()).put(new DiscoveryNode("_non_existent",
-                    DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT)));
+                    LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT)));

                 // open index
                 final IndexMetaData indexMetaData = IndexMetaData.builder(currentState.metaData().index(index)).state(IndexMetaData.State.OPEN).build();
@@ -145,7 +145,7 @@
             }

             @Override
-            public void onFailure(String source, Throwable t) {
+            public void onFailure(String source, Exception e) {
             }
         });
@@ -165,7 +165,7 @@
             }

             @Override
-            public void onFailure(String source, Throwable t) {
+            public void onFailure(String source, Exception e) {
             }
         });
@@ -260,7 +260,7 @@
             }

             @Override
-            public void onFailure(Throwable e) {
+            public void onFailure(Exception e) {
                 putMappingResponse.set(e);
             }
         });
@@ -292,7 +292,7 @@
             }

             @Override
-            public void onFailure(Throwable e) {
+            public void onFailure(Exception e) {
                 docIndexResponse.set(e);
             }
         });
@@ -376,7 +376,7 @@
             }

             @Override
-            public void onFailure(Throwable e) {
+            public void onFailure(Exception e) {
                 putMappingResponse.set(e);
             }
         });
@@ -403,7 +403,7 @@
             }

             @Override
-            public void onFailure(Throwable e) {
+            public void onFailure(Exception e) {
                 docIndexResponse.set(e);
             }
         });
diff --git a/core/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java b/core/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java
index ea3ebf5179b..66687ea74fa 100644
--- a/core/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java
@@ -24,6 +24,7 @@ import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
 import org.elasticsearch.action.admin.indices.close.CloseIndexResponse;
 import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
 import org.elasticsearch.action.admin.indices.open.OpenIndexResponse;
+import org.elasticsearch.action.support.ActiveShardCount;
 import org.elasticsearch.cluster.health.ClusterHealthStatus;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.routing.ShardRoutingState;
@@ -98,7 +99,7 @@ public class SimpleIndexStateIT extends ESIntegTestCase {
     public void testFastCloseAfterCreateContinuesCreateAfterOpen() {
         logger.info("--> creating test index that cannot be allocated");
-        client().admin().indices().prepareCreate("test").setSettings(Settings.builder()
+        client().admin().indices().prepareCreate("test").setWaitForActiveShards(ActiveShardCount.NONE).setSettings(Settings.builder()
             .put("index.routing.allocation.include.tag", "no_such_node").build()).get();

         ClusterHealthResponse health = client().admin().cluster().prepareHealth("test").setWaitForNodes(">=2").get();
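`setWaitForActiveShards(ActiveShardCount.NONE)` lets the create call return without waiting for a single shard to start, which is what a test for a deliberately unallocatable index needs; other files in this patch use `ActiveShardCount.ALL` to wait for every copy. A rough sketch of the three-way choice as a tiny enum (illustrative only; the real class also accepts explicit counts, and the default semantics here are an assumption):

    /** Illustrative stand-in for the wait-for-active-shards choice; not the ES implementation. */
    enum WaitForShards {
        NONE,    // return as soon as the index exists, even if nothing is allocated
        DEFAULT, // wait for one active copy per shard (the primary)
        ALL;     // wait until every primary and replica is started

        boolean enoughShardsActive(int activeCopies, int totalCopies) {
            switch (this) {
                case NONE:    return true;
                case DEFAULT: return activeCopies >= 1;
                case ALL:     return activeCopies == totalCopies;
                default:      throw new AssertionError();
            }
        }
    }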
diff --git a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java
index 6bce95af184..8e88aff523c 100644
--- a/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java
@@ -647,7 +647,7 @@ public class IndexStatsIT extends ESIntegTestCase {
         flags.writeTo(out);
         out.close();
         BytesReference bytes = out.bytes();
-        CommonStatsFlags readStats = CommonStatsFlags.readCommonStatsFlags(StreamInput.wrap(bytes));
+        CommonStatsFlags readStats = CommonStatsFlags.readCommonStatsFlags(bytes.streamInput());
         for (Flag flag : values) {
             assertThat(flags.isSet(flag), equalTo(readStats.isSet(flag)));
         }
@@ -661,7 +661,7 @@ public class IndexStatsIT extends ESIntegTestCase {
         flags.writeTo(out);
         out.close();
         BytesReference bytes = out.bytes();
-        CommonStatsFlags readStats = CommonStatsFlags.readCommonStatsFlags(StreamInput.wrap(bytes));
+        CommonStatsFlags readStats = CommonStatsFlags.readCommonStatsFlags(bytes.streamInput());
         for (Flag flag : values) {
             assertThat(flags.isSet(flag), equalTo(readStats.isSet(flag)));
         }
diff --git a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java
index b4f66c2e17b..ad26ec71226 100644
--- a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java
@@ -426,7 +426,7 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase {
             }

             @Override
-            public void onFailure(String source, Throwable t) {
+            public void onFailure(String source, Exception e) {
             }
         });
         waitNoPendingTasksOnAll();
diff --git a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java
index e558f0f2a12..96af4ef3671 100644
--- a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java
+++ b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreTests.java
@@ -57,7 +57,7 @@ import static org.elasticsearch.test.VersionUtils.randomVersion;
 /**
  */
 public class IndicesStoreTests extends ESTestCase {
-    private final static ShardRoutingState[] NOT_STARTED_STATES;
+    private static final ShardRoutingState[] NOT_STARTED_STATES;

     static {
         Set<ShardRoutingState> set = new HashSet<>();
diff --git a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java
index 493f8b74e04..a5ec8e4ecd7 100644
--- a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java
@@ -35,7 +35,6 @@ import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.indices.IndexTemplateAlreadyExistsException;
 import org.elasticsearch.indices.InvalidAliasNameException;
-import org.elasticsearch.indices.InvalidIndexNameException;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.test.ESIntegTestCase;
@@ -116,7 +115,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
         ensureGreen();
         SearchResponse searchResponse = client().prepareSearch("test_index")
                 .setQuery(termQuery("field1", "value1"))
-                .addField("field1").addField("field2")
+                .addStoredField("field1").addStoredField("field2")
                 .execute().actionGet();

         assertHitCount(searchResponse, 1);
@@ -130,7 +129,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
         // now only match on one template (template_1)
         searchResponse = client().prepareSearch("text_index")
                 .setQuery(termQuery("field1", "value1"))
-                .addField("field1").addField("field2")
+                .addStoredField("field1").addStoredField("field2")
                 .execute().actionGet();
         if (searchResponse.getFailedShards() > 0) {
             logger.warn("failed search {}", Arrays.toString(searchResponse.getShardFailures()));
         }
diff --git a/core/src/test/java/org/elasticsearch/ingest/ConfigurationUtilsTests.java b/core/src/test/java/org/elasticsearch/ingest/ConfigurationUtilsTests.java
index fa78d5aa16c..f2d9aaa5170 100644
--- a/core/src/test/java/org/elasticsearch/ingest/ConfigurationUtilsTests.java
+++ b/core/src/test/java/org/elasticsearch/ingest/ConfigurationUtilsTests.java
@@ -19,14 +19,6 @@

 package org.elasticsearch.ingest;

-import org.elasticsearch.ElasticsearchParseException;
-import org.elasticsearch.cluster.service.ClusterService;
-import org.elasticsearch.ingest.ProcessorsRegistry;
-import org.elasticsearch.script.ScriptService;
-import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.threadpool.TestThreadPool;
-import org.junit.Before;
-
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -34,11 +26,14 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;

+import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.test.ESTestCase;
+import org.junit.Before;
+
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.sameInstance;
 import static org.mockito.Mockito.mock;

-
 public class ConfigurationUtilsTests extends ESTestCase {
     private Map<String, Object> config;
@@ -97,10 +92,8 @@ public class ConfigurationUtilsTests extends ESTestCase {

     public void testReadProcessors() throws Exception {
         Processor processor = mock(Processor.class);
-        ProcessorsRegistry.Builder builder = new ProcessorsRegistry.Builder();
-        builder.registerProcessor("test_processor", (registry) -> config -> processor);
-        ProcessorsRegistry registry = builder.build(mock(ScriptService.class), mock(ClusterService.class));
-
+        Map<String, Processor.Factory> registry =
+            Collections.singletonMap("test_processor", (factories, tag, config) -> processor);
         List<Map<String, Map<String, Object>>> config = new ArrayList<>();
         Map<String, Object> emptyConfig = Collections.emptyMap();
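The registry rewrite replaces the `ProcessorsRegistry` builder with a plain `Map<String, Processor.Factory>` whose factory is a three-argument functional interface (the other factories, the processor tag, the config map), so tests can register a processor with a single lambda. A minimal sketch of that shape; the interface below is paraphrased from the call sites in the diff, not the exact Elasticsearch signature:

    import java.util.Collections;
    import java.util.Map;

    public class FactoryMapDemo {
        interface Processor { /* behavior elided */ }

        @FunctionalInterface
        interface ProcessorFactory {
            Processor create(Map<String, ProcessorFactory> factories, String tag, Map<String, Object> config)
                throws Exception;
        }

        public static void main(String[] args) throws Exception {
            Processor processor = new Processor() {};
            Map<String, ProcessorFactory> registry =
                Collections.singletonMap("test_processor", (factories, tag, config) -> processor);
            Processor created = registry.get("test_processor").create(registry, null, Collections.emptyMap());
            System.out.println(created == processor); // true
        }
    }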
diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java b/core/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java
new file mode 100644
index 00000000000..08cde7e04d8
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/ingest/IngestServiceTests.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Map;
+
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.plugins.IngestPlugin;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.mockito.Mockito;
+
+public class IngestServiceTests extends ESTestCase {
+    private final IngestPlugin DUMMY_PLUGIN = new IngestPlugin() {
+        @Override
+        public Map<String, Processor.Factory> getProcessors(Processor.Parameters parameters) {
+            return Collections.singletonMap("foo", (factories, tag, config) -> null);
+        }
+    };
+
+    public void testIngestPlugin() {
+        ThreadPool tp = Mockito.mock(ThreadPool.class);
+        IngestService ingestService = new IngestService(Settings.EMPTY, tp, null, null, Collections.singletonList(DUMMY_PLUGIN));
+        Map<String, Processor.Factory> factories = ingestService.getPipelineStore().getProcessorFactories();
+        assertTrue(factories.containsKey("foo"));
+        assertEquals(1, factories.size());
+    }
+
+    public void testIngestPluginDuplicate() {
+        ThreadPool tp = Mockito.mock(ThreadPool.class);
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
+            new IngestService(Settings.EMPTY, tp, null, null, Arrays.asList(DUMMY_PLUGIN, DUMMY_PLUGIN))
+        );
+        assertTrue(e.getMessage(), e.getMessage().contains("already registered"));
+    }
+}
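testIngestPluginDuplicate expects two plugins exposing the same processor type to fail with a message containing "already registered". A common way to get that behavior when merging plugin-provided maps is `putIfAbsent`; the sketch below is JDK-only and the exact message wording and helper name are illustrative:

    import java.util.HashMap;
    import java.util.Map;

    public class DuplicateRegistrationDemo {
        static <V> void register(Map<String, V> registry, String type, V factory) {
            if (registry.putIfAbsent(type, factory) != null) {
                throw new IllegalArgumentException("Ingest processor [" + type + "] is already registered");
            }
        }

        public static void main(String[] args) {
            Map<String, Object> registry = new HashMap<>();
            register(registry, "foo", new Object());
            try {
                register(registry, "foo", new Object()); // the second registration must fail
            } catch (IllegalArgumentException e) {
                System.out.println(e.getMessage());
            }
        }
    }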
diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestStatsTests.java b/core/src/test/java/org/elasticsearch/ingest/IngestStatsTests.java
index 119e94580ad..9974dd568a8 100644
--- a/core/src/test/java/org/elasticsearch/ingest/IngestStatsTests.java
+++ b/core/src/test/java/org/elasticsearch/ingest/IngestStatsTests.java
@@ -62,7 +62,7 @@ public class IngestStatsTests extends ESTestCase {
     private IngestStats serialize(IngestStats stats) throws IOException {
         BytesStreamOutput out = new BytesStreamOutput();
         stats.writeTo(out);
-        StreamInput in = StreamInput.wrap(out.bytes());
+        StreamInput in = out.bytes().streamInput();
         return new IngestStats(in);
     }
 }
diff --git a/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java
index 8bf6f77a026..53964132abe 100644
--- a/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java
@@ -74,7 +74,7 @@ public class PipelineExecutionServiceTests extends ESTestCase {
     public void testExecuteIndexPipelineDoesNotExist() {
         IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");
         @SuppressWarnings("unchecked")
-        Consumer<Throwable> failureHandler = mock(Consumer.class);
+        Consumer<Exception> failureHandler = mock(Consumer.class);
         @SuppressWarnings("unchecked")
         Consumer<Boolean> completionHandler = mock(Consumer.class);
         try {
@@ -83,7 +83,7 @@ public class PipelineExecutionServiceTests extends ESTestCase {
         } catch (IllegalArgumentException e) {
             assertThat(e.getMessage(), equalTo("pipeline with id [_id] does not exist"));
         }
-        verify(failureHandler, never()).accept(any(Throwable.class));
+        verify(failureHandler, never()).accept(any(Exception.class));
         verify(completionHandler, never()).accept(anyBoolean());
     }
@@ -98,9 +98,9 @@ public class PipelineExecutionServiceTests extends ESTestCase {
             new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("does_not_exist");
         bulkRequest.add(indexRequest2);
         @SuppressWarnings("unchecked")
-        BiConsumer<IndexRequest, Throwable> failureHandler = mock(BiConsumer.class);
+        BiConsumer<IndexRequest, Exception> failureHandler = mock(BiConsumer.class);
         @SuppressWarnings("unchecked")
-        Consumer<Throwable> completionHandler = mock(Consumer.class);
+        Consumer<Exception> completionHandler = mock(Consumer.class);
         executionService.executeBulkRequest(bulkRequest.requests(), failureHandler, completionHandler);
         verify(failureHandler, times(1)).accept(
             argThat(new CustomTypeSafeMatcher<IndexRequest>("failure handler was not called with the expected arguments") {
@@ -126,7 +126,7 @@ public class PipelineExecutionServiceTests extends ESTestCase {
         IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");

         @SuppressWarnings("unchecked")
-        Consumer<Throwable> failureHandler = mock(Consumer.class);
+        Consumer<Exception> failureHandler = mock(Consumer.class);
         @SuppressWarnings("unchecked")
         Consumer<Boolean> completionHandler = mock(Consumer.class);
         executionService.executeIndexRequest(indexRequest, failureHandler, completionHandler);
@@ -134,8 +134,25 @@ public class PipelineExecutionServiceTests extends ESTestCase {
         verify(completionHandler, times(1)).accept(true);
     }

+    public void testExecuteEmptyPipeline() throws Exception {
+        CompoundProcessor processor = mock(CompoundProcessor.class);
+        when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", processor));
+        when(processor.getProcessors()).thenReturn(Collections.emptyList());
+
+        IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");
+        @SuppressWarnings("unchecked")
+        Consumer<Exception> failureHandler = mock(Consumer.class);
+        @SuppressWarnings("unchecked")
+        Consumer<Boolean> completionHandler = mock(Consumer.class);
+        executionService.executeIndexRequest(indexRequest, failureHandler, completionHandler);
+        verify(processor, never()).execute(any());
+        verify(failureHandler, never()).accept(any());
+        verify(completionHandler, times(1)).accept(true);
+    }
+
     public void testExecutePropagateAllMetaDataUpdates() throws Exception {
         CompoundProcessor processor = mock(CompoundProcessor.class);
+        when(processor.getProcessors()).thenReturn(Collections.singletonList(mock(Processor.class)));
         doAnswer((InvocationOnMock invocationOnMock) -> {
             IngestDocument ingestDocument = (IngestDocument) invocationOnMock.getArguments()[0];
             for (IngestDocument.MetaData metaData : IngestDocument.MetaData.values()) {
@@ -152,7 +169,7 @@ public class PipelineExecutionServiceTests extends ESTestCase {

         IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");
         @SuppressWarnings("unchecked")
-        Consumer<Throwable> failureHandler = mock(Consumer.class);
+        Consumer<Exception> failureHandler = mock(Consumer.class);
         @SuppressWarnings("unchecked")
         Consumer<Boolean> completionHandler = mock(Consumer.class);
         executionService.executeIndexRequest(indexRequest, failureHandler, completionHandler);
@@ -171,11 +188,12 @@ public class PipelineExecutionServiceTests extends ESTestCase {

     public void testExecuteFailure() throws Exception {
         CompoundProcessor processor = mock(CompoundProcessor.class);
+        when(processor.getProcessors()).thenReturn(Collections.singletonList(mock(Processor.class)));
         when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", processor));
         IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");
         doThrow(new RuntimeException()).when(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap()));
         @SuppressWarnings("unchecked")
-        Consumer<Throwable> failureHandler = mock(Consumer.class);
+        Consumer<Exception> failureHandler = mock(Consumer.class);
         @SuppressWarnings("unchecked")
         Consumer<Boolean> completionHandler = mock(Consumer.class);
         executionService.executeIndexRequest(indexRequest, failureHandler, completionHandler);
@@ -195,7 +213,7 @@ public class PipelineExecutionServiceTests extends ESTestCase {
         IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");
         doThrow(new RuntimeException()).when(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap()));
         @SuppressWarnings("unchecked")
-        Consumer<Throwable> failureHandler = mock(Consumer.class);
+        Consumer<Exception> failureHandler = mock(Consumer.class);
         @SuppressWarnings("unchecked")
         Consumer<Boolean> completionHandler = mock(Consumer.class);
         executionService.executeIndexRequest(indexRequest, failureHandler, completionHandler);
@@ -213,7 +231,7 @@ public class PipelineExecutionServiceTests extends ESTestCase {
         doThrow(new RuntimeException()).when(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap()));
         doThrow(new RuntimeException()).when(onFailureProcessor).execute(eqID("_index", "_type", "_id", Collections.emptyMap()));
         @SuppressWarnings("unchecked")
-        Consumer<Throwable> failureHandler = mock(Consumer.class);
+        Consumer<Exception> failureHandler = mock(Consumer.class);
         @SuppressWarnings("unchecked")
         Consumer<Boolean> completionHandler = mock(Consumer.class);
         executionService.executeIndexRequest(indexRequest, failureHandler, completionHandler);
@@ -235,7 +253,7 @@ public class PipelineExecutionServiceTests extends ESTestCase {
         doThrow(new RuntimeException()).when(onFailureProcessor).execute(eqID("_index", "_type", "_id", Collections.emptyMap()));
         doThrow(new RuntimeException()).when(processor).execute(eqID("_index", "_type", "_id", Collections.emptyMap()));
         @SuppressWarnings("unchecked")
-        Consumer<Throwable> failureHandler = mock(Consumer.class);
+        Consumer<Exception> failureHandler = mock(Consumer.class);
         @SuppressWarnings("unchecked")
         Consumer<Boolean> completionHandler = mock(Consumer.class);
         executionService.executeIndexRequest(indexRequest, failureHandler, completionHandler);
@@ -250,7 +268,7 @@ public class PipelineExecutionServiceTests extends ESTestCase {
         IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");

         @SuppressWarnings("unchecked")
-        Consumer<Throwable> failureHandler = mock(Consumer.class);
+        Consumer<Exception> failureHandler = mock(Consumer.class);
         @SuppressWarnings("unchecked")
         Consumer<Boolean> completionHandler = mock(Consumer.class);
         executionService.executeIndexRequest(indexRequest, failureHandler, completionHandler);
@@ -266,7 +284,7 @@ public class PipelineExecutionServiceTests extends ESTestCase {
         IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id");

         @SuppressWarnings("unchecked")
-        Consumer<Throwable> failureHandler = mock(Consumer.class);
+        Consumer<Exception> failureHandler = mock(Consumer.class);
         @SuppressWarnings("unchecked")
         Consumer<Boolean> completionHandler = mock(Consumer.class);
         executionService.executeIndexRequest(indexRequest, failureHandler, completionHandler);
@@ -280,7 +298,7 @@ public class PipelineExecutionServiceTests extends ESTestCase {
         IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").setPipeline("_id")
             .source(Collections.emptyMap())
             .ttl(1000L);
-        Consumer<Throwable> failureHandler = mock(Consumer.class);
+        Consumer<Exception> failureHandler = mock(Consumer.class);
         Consumer<Boolean> completionHandler = mock(Consumer.class);
         executionService.executeIndexRequest(indexRequest, failureHandler, completionHandler);
@@ -313,12 +331,13 @@ public class PipelineExecutionServiceTests extends ESTestCase {
         }

         CompoundProcessor processor = mock(CompoundProcessor.class);
+        when(processor.getProcessors()).thenReturn(Collections.singletonList(mock(Processor.class)));
         Exception error = new RuntimeException();
         doThrow(error).when(processor).execute(any());
         when(store.get(pipelineId)).thenReturn(new Pipeline(pipelineId, null, processor));

-        BiConsumer<IndexRequest, Throwable> requestItemErrorHandler = mock(BiConsumer.class);
-        Consumer<Throwable> completionHandler = mock(Consumer.class);
+        BiConsumer<IndexRequest, Exception> requestItemErrorHandler = mock(BiConsumer.class);
+        Consumer<Exception> completionHandler = mock(Consumer.class);
         executionService.executeBulkRequest(bulkRequest.requests(), requestItemErrorHandler, completionHandler);

         verify(requestItemErrorHandler, times(numIndexRequests)).accept(any(IndexRequest.class), eq(error));
@@ -339,9 +358,9 @@ public class PipelineExecutionServiceTests extends ESTestCase {
         when(store.get(pipelineId)).thenReturn(new Pipeline(pipelineId, null, new CompoundProcessor()));

         @SuppressWarnings("unchecked")
-        BiConsumer<IndexRequest, Throwable> requestItemErrorHandler = mock(BiConsumer.class);
+        BiConsumer<IndexRequest, Exception> requestItemErrorHandler = mock(BiConsumer.class);
         @SuppressWarnings("unchecked")
-        Consumer<Throwable> completionHandler = mock(Consumer.class);
+        Consumer<Exception> completionHandler = mock(Consumer.class);
         executionService.executeBulkRequest(bulkRequest.requests(), requestItemErrorHandler, completionHandler);

         verify(requestItemErrorHandler, never()).accept(any(), any());
@@ -356,15 +375,15 @@ public class PipelineExecutionServiceTests extends ESTestCase {
         assertThat(ingestStats.getTotalStats().getIngestFailedCount(), equalTo(0L));
         assertThat(ingestStats.getTotalStats().getIngestTimeInMillis(), equalTo(0L));

-        when(store.get("_id1")).thenReturn(new Pipeline("_id1", null, new CompoundProcessor()));
-        when(store.get("_id2")).thenReturn(new Pipeline("_id2", null, new CompoundProcessor()));
+        when(store.get("_id1")).thenReturn(new Pipeline("_id1", null, new CompoundProcessor(mock(Processor.class))));
+        when(store.get("_id2")).thenReturn(new Pipeline("_id2", null, new CompoundProcessor(mock(Processor.class))));
         Map<String, PipelineConfiguration> configurationMap = new HashMap<>();
         configurationMap.put("_id1", new PipelineConfiguration("_id1", new BytesArray("{}")));
         configurationMap.put("_id2", new PipelineConfiguration("_id2", new BytesArray("{}")));
         executionService.updatePipelineStats(new IngestMetadata(configurationMap));

-        Consumer<Throwable> failureHandler = mock(Consumer.class);
+        Consumer<Exception> failureHandler = mock(Consumer.class);
         Consumer<Boolean> completionHandler = mock(Consumer.class);

         IndexRequest indexRequest = new IndexRequest("_index");
diff --git a/core/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java
index cb7bd849a47..b09d772729c 100644
--- a/core/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java
+++ b/core/src/test/java/org/elasticsearch/ingest/PipelineFactoryTests.java
@@ -31,6 +31,7 @@ import java.util.List;
 import java.util.Map;

 import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.Matchers.empty;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.nullValue;
 import static org.mockito.Mockito.mock;
@@ -40,13 +41,13 @@ public class PipelineFactoryTests extends ESTestCase {

     public void testCreate() throws Exception {
         Map<String, Object> processorConfig0 = new HashMap<>();
         Map<String, Object> processorConfig1 = new HashMap<>();
-        processorConfig0.put(AbstractProcessorFactory.TAG_KEY, "first-processor");
+        processorConfig0.put(ConfigurationUtils.TAG_KEY, "first-processor");
         Map<String, Object> pipelineConfig = new HashMap<>();
         pipelineConfig.put(Pipeline.DESCRIPTION_KEY, "_description");
         pipelineConfig.put(Pipeline.PROCESSORS_KEY, Arrays.asList(Collections.singletonMap("test", processorConfig0), Collections.singletonMap("test", processorConfig1)));
         Pipeline.Factory factory = new Pipeline.Factory();
-        ProcessorsRegistry processorRegistry = createProcessorRegistry(Collections.singletonMap("test", new TestProcessor.Factory()));
+        Map<String, Processor.Factory> processorRegistry = Collections.singletonMap("test", new TestProcessor.Factory());
         Pipeline pipeline = factory.create("_id", pipelineConfig, processorRegistry);
         assertThat(pipeline.getId(), equalTo("_id"));
         assertThat(pipeline.getDescription(), equalTo("_description"));
@@ -62,13 +63,24 @@ public class PipelineFactoryTests extends ESTestCase {
         pipelineConfig.put(Pipeline.DESCRIPTION_KEY, "_description");
         Pipeline.Factory factory = new Pipeline.Factory();
         try {
-            factory.create("_id", pipelineConfig, createProcessorRegistry(Collections.emptyMap()));
+            factory.create("_id", pipelineConfig, Collections.emptyMap());
             fail("should fail, missing required [processors] field");
         } catch (ElasticsearchParseException e) {
             assertThat(e.getMessage(), equalTo("[processors] required property is missing"));
         }
     }

+    public void testCreateWithEmptyProcessorsField() throws Exception {
+        Map<String, Object> pipelineConfig = new HashMap<>();
+        pipelineConfig.put(Pipeline.DESCRIPTION_KEY, "_description");
+        pipelineConfig.put(Pipeline.PROCESSORS_KEY, Collections.emptyList());
+        Pipeline.Factory factory = new Pipeline.Factory();
+        Pipeline pipeline = factory.create("_id", pipelineConfig, null);
+        assertThat(pipeline.getId(), equalTo("_id"));
+        assertThat(pipeline.getDescription(), equalTo("_description"));
+        assertThat(pipeline.getProcessors(), is(empty()));
+    }
+
     public void testCreateWithPipelineOnFailure() throws Exception {
         Map<String, Object> processorConfig = new HashMap<>();
         Map<String, Object> pipelineConfig = new HashMap<>();
@@ -76,7 +88,7 @@ public class PipelineFactoryTests extends ESTestCase {
         pipelineConfig.put(Pipeline.PROCESSORS_KEY, Collections.singletonList(Collections.singletonMap("test", processorConfig)));
         pipelineConfig.put(Pipeline.ON_FAILURE_KEY, Collections.singletonList(Collections.singletonMap("test", processorConfig)));
         Pipeline.Factory factory = new Pipeline.Factory();
-        ProcessorsRegistry processorRegistry = createProcessorRegistry(Collections.singletonMap("test", new TestProcessor.Factory()));
+        Map<String, Processor.Factory> processorRegistry = Collections.singletonMap("test", new TestProcessor.Factory());
         Pipeline pipeline = factory.create("_id", pipelineConfig, processorRegistry);
         assertThat(pipeline.getId(), equalTo("_id"));
         assertThat(pipeline.getDescription(), equalTo("_description"));
@@ -93,7 +105,7 @@ public class PipelineFactoryTests extends ESTestCase {
         pipelineConfig.put(Pipeline.PROCESSORS_KEY, Collections.singletonList(Collections.singletonMap("test", processorConfig)));
         pipelineConfig.put(Pipeline.ON_FAILURE_KEY, Collections.emptyList());
         Pipeline.Factory factory = new Pipeline.Factory();
-        ProcessorsRegistry processorRegistry = createProcessorRegistry(Collections.singletonMap("test", new TestProcessor.Factory()));
+        Map<String, Processor.Factory> processorRegistry = Collections.singletonMap("test", new TestProcessor.Factory());
         Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create("_id", pipelineConfig, processorRegistry));
         assertThat(e.getMessage(), equalTo("pipeline [_id] cannot have an empty on_failure option defined"));
     }
@@ -105,7 +117,7 @@ public class PipelineFactoryTests extends ESTestCase {
         pipelineConfig.put(Pipeline.DESCRIPTION_KEY, "_description");
         pipelineConfig.put(Pipeline.PROCESSORS_KEY, Collections.singletonList(Collections.singletonMap("test", processorConfig)));
         Pipeline.Factory factory = new Pipeline.Factory();
-        ProcessorsRegistry processorRegistry = createProcessorRegistry(Collections.singletonMap("test", new TestProcessor.Factory()));
+        Map<String, Processor.Factory> processorRegistry = Collections.singletonMap("test", new TestProcessor.Factory());
         Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create("_id", pipelineConfig, processorRegistry));
         assertThat(e.getMessage(), equalTo("[on_failure] processors list cannot be empty"));
     }
@@ -114,7 +126,7 @@ public class PipelineFactoryTests extends ESTestCase {
         Map<String, Object> processorConfig = new HashMap<>();
         processorConfig.put("ignore_failure", true);

-        ProcessorsRegistry processorRegistry = createProcessorRegistry(Collections.singletonMap("test", new TestProcessor.Factory()));
+        Map<String, Processor.Factory> processorRegistry = Collections.singletonMap("test", new TestProcessor.Factory());
         Pipeline.Factory factory = new Pipeline.Factory();
         Map<String, Object> pipelineConfig = new HashMap<>();
         pipelineConfig.put(Pipeline.DESCRIPTION_KEY, "_description");
@@ -139,7 +151,7 @@ public class PipelineFactoryTests extends ESTestCase {
         pipelineConfig.put(Pipeline.DESCRIPTION_KEY, "_description");
         pipelineConfig.put(Pipeline.PROCESSORS_KEY, Collections.singletonList(Collections.singletonMap("test", processorConfig)));
         Pipeline.Factory factory = new Pipeline.Factory();
-        ProcessorsRegistry processorRegistry = createProcessorRegistry(Collections.singletonMap("test", new TestProcessor.Factory()));
+        Map<String, Processor.Factory> processorRegistry = Collections.singletonMap("test", new TestProcessor.Factory());
         Exception e = expectThrows(ElasticsearchParseException.class, () -> factory.create("_id", pipelineConfig, processorRegistry));
         assertThat(e.getMessage(), equalTo("processor [test] doesn't support one or more provided configuration parameters [unused]"));
     }
@@ -152,7 +164,7 @@ public class PipelineFactoryTests extends ESTestCase {
         pipelineConfig.put(Pipeline.DESCRIPTION_KEY, "_description");
         pipelineConfig.put(Pipeline.PROCESSORS_KEY, Collections.singletonList(Collections.singletonMap("test", processorConfig)));
         Pipeline.Factory factory = new Pipeline.Factory();
-        ProcessorsRegistry processorRegistry = createProcessorRegistry(Collections.singletonMap("test", new TestProcessor.Factory()));
+        Map<String, Processor.Factory> processorRegistry = Collections.singletonMap("test", new TestProcessor.Factory());
         Pipeline pipeline = factory.create("_id", pipelineConfig, processorRegistry);
         assertThat(pipeline.getId(), equalTo("_id"));
         assertThat(pipeline.getDescription(), equalTo("_description"));
@@ -169,12 +181,4 @@ public class PipelineFactoryTests extends ESTestCase {
         List<Processor> flattened = pipeline.flattenAllProcessors();
         assertThat(flattened.size(), equalTo(4));
     }
-
-    private ProcessorsRegistry createProcessorRegistry(Map<String, Processor.Factory> processorRegistry) {
-        ProcessorsRegistry.Builder builder = new ProcessorsRegistry.Builder();
-        for (Map.Entry<String, Processor.Factory> entry : processorRegistry.entrySet()) {
-            builder.registerProcessor(entry.getKey(), ((registry) -> entry.getValue()));
-        }
-        return builder.build(mock(ScriptService.class), mock(ClusterService.class));
-    }
 }
diff --git a/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java
index 55ea4360ece..1510d25b695 100644
--- a/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java
+++ b/core/src/test/java/org/elasticsearch/ingest/PipelineStoreTests.java
@@ -56,9 +56,8 @@ public class PipelineStoreTests extends ESTestCase {

     @Before
     public void init() throws Exception {
-        store = new PipelineStore(Settings.EMPTY);
-        ProcessorsRegistry.Builder registryBuilder = new ProcessorsRegistry.Builder();
-        registryBuilder.registerProcessor("set", (registry) -> config -> {
+        Map<String, Processor.Factory> processorFactories = new HashMap<>();
+        processorFactories.put("set", (factories, tag, config) -> {
             String field = (String) config.remove("field");
             String value = (String) config.remove("value");
             return new Processor() {
@@ -78,7 +77,7 @@ public class PipelineStoreTests extends ESTestCase {
                 }
             };
         });
-        registryBuilder.registerProcessor("remove", (registry) -> config -> {
+        processorFactories.put("remove", (factories, tag, config) -> {
             String field = (String) config.remove("field");
             return new Processor() {
                 @Override
@@ -97,7 +96,7 @@ public class PipelineStoreTests extends ESTestCase {
                 }
             };
         });
-        store.buildProcessorFactoryRegistry(registryBuilder, mock(ScriptService.class), mock(ClusterService.class));
+        store = new PipelineStore(Settings.EMPTY, processorFactories);
     }

     public void testUpdatePipelines() {
@@ -259,7 +258,7 @@ public class PipelineStoreTests extends ESTestCase {
             store.validatePipeline(ingestInfos, putRequest);
             fail("exception expected");
         } catch (IllegalArgumentException e) {
-            assertThat(e.getMessage(), equalTo("Processor type [remove] is not installed on node [{_node_id2}{local}{local[_id]}]"));
+            assertThat(e.getMessage(), equalTo("Processor type [remove] is not installed on node [" + node2 + "]"));
         }

         ingestInfos.put(node2, new IngestInfo(Arrays.asList(new ProcessorInfo("set"), new ProcessorInfo("remove"))));
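The inlined `set` and `remove` factories read their settings with `config.remove(...)` rather than `config.get(...)`. Consuming keys as they are read is what lets the pipeline factory reject leftovers afterwards, which is the "doesn't support one or more provided configuration parameters [unused]" failure exercised in PipelineFactoryTests above. A JDK-only sketch of that convention (helper name and message are illustrative):

    import java.util.HashMap;
    import java.util.Map;

    public class ConsumedConfigDemo {
        static String readStringProperty(Map<String, Object> config, String key) {
            return (String) config.remove(key); // consume the key so leftovers can be detected
        }

        public static void main(String[] args) {
            Map<String, Object> config = new HashMap<>();
            config.put("field", "foo");
            config.put("unused", true);
            String field = readStringProperty(config, "field"); // consumes "field"
            // after construction, anything still in the map is an unsupported parameter:
            System.out.println("field = " + field + ", leftovers = " + config.keySet()); // [unused]
        }
    }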
diff --git a/core/src/test/java/org/elasticsearch/ingest/ProcessorsRegistryTests.java b/core/src/test/java/org/elasticsearch/ingest/ProcessorsRegistryTests.java
deleted file mode 100644
index 41701102584..00000000000
--- a/core/src/test/java/org/elasticsearch/ingest/ProcessorsRegistryTests.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.ingest;
-
-import org.elasticsearch.cluster.service.ClusterService;
-import org.elasticsearch.script.ScriptService;
-import org.elasticsearch.test.ESTestCase;
-
-import static org.hamcrest.CoreMatchers.equalTo;
-import static org.hamcrest.CoreMatchers.sameInstance;
-import static org.mockito.Mockito.mock;
-
-public class ProcessorsRegistryTests extends ESTestCase {
-
-    public void testBuildProcessorRegistry() {
-        ProcessorsRegistry.Builder builder = new ProcessorsRegistry.Builder();
-        TestProcessor.Factory factory1 = new TestProcessor.Factory();
-        builder.registerProcessor("1", (registry) -> factory1);
-        TestProcessor.Factory factory2 = new TestProcessor.Factory();
-        builder.registerProcessor("2", (registry) -> factory2);
-        TestProcessor.Factory factory3 = new TestProcessor.Factory();
-        try {
-            builder.registerProcessor("1", (registry) -> factory3);
-            fail("addProcessor should have failed");
-        } catch(IllegalArgumentException e) {
-            assertThat(e.getMessage(), equalTo("Processor factory already registered for name [1]"));
-        }
-
-        ProcessorsRegistry registry = builder.build(mock(ScriptService.class), mock(ClusterService.class));
-        assertThat(registry.getProcessorFactories().size(), equalTo(2));
-        assertThat(registry.getProcessorFactory("1"), sameInstance(factory1));
-        assertThat(registry.getProcessorFactory("2"), sameInstance(factory2));
-    }
-}
diff --git a/core/src/test/java/org/elasticsearch/mget/SimpleMgetIT.java b/core/src/test/java/org/elasticsearch/mget/SimpleMgetIT.java
index fd332321618..9841b5ba2b3 100644
--- a/core/src/test/java/org/elasticsearch/mget/SimpleMgetIT.java
+++ b/core/src/test/java/org/elasticsearch/mget/SimpleMgetIT.java
@@ -44,7 +44,6 @@ import static org.hamcrest.Matchers.nullValue;
 public class SimpleMgetIT extends ESIntegTestCase {

     public void testThatMgetShouldWorkWithOneIndexMissing() throws IOException {
         createIndex("test");
-        ensureYellow();

         client().prepareIndex("test", "test", "1").setSource(jsonBuilder().startObject().field("foo", "bar").endObject())
             .setRefreshPolicy(IMMEDIATE).get();
@@ -86,7 +85,6 @@ public class SimpleMgetIT extends ESIntegTestCase {
                     .endObject()
                 .endObject()
             .endObject()));
-        ensureYellow();

         client().prepareIndex("test", "test", "1").setParent("4").setRefreshPolicy(IMMEDIATE)
             .setSource(jsonBuilder().startObject().field("foo", "bar").endObject())
@@ -109,7 +107,6 @@ public class SimpleMgetIT extends ESIntegTestCase {
     @SuppressWarnings("unchecked")
     public void testThatSourceFilteringIsSupported() throws Exception {
         assertAcked(prepareCreate("test").addAlias(new Alias("alias")));
-        ensureYellow();
         BytesReference sourceBytesRef = jsonBuilder().startObject()
             .field("field", "1", "2")
             .startObject("included").field("field", "should be seen").field("hidden_field", "should not be seen").endObject()
@@ -151,7 +148,6 @@ public class SimpleMgetIT extends ESIntegTestCase {
             .setSettings(Settings.builder()
                 .put(indexSettings())
                 .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, between(2, DEFAULT_MAX_NUM_SHARDS))));
-        ensureYellow();

         final String id = routingKeyForShard("test", 0);
         final String routingOtherShard = routingKeyForShard("test", 1);
diff --git a/core/src/test/java/org/elasticsearch/monitor/jvm/JvmGcMonitorServiceSettingsTests.java b/core/src/test/java/org/elasticsearch/monitor/jvm/JvmGcMonitorServiceSettingsTests.java
index a1f4d381911..dc374f13339 100644
--- a/core/src/test/java/org/elasticsearch/monitor/jvm/JvmGcMonitorServiceSettingsTests.java
+++ b/core/src/test/java/org/elasticsearch/monitor/jvm/JvmGcMonitorServiceSettingsTests.java
@@ -24,14 +24,13 @@ import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.threadpool.TestThreadPool;
 import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.threadpool.ThreadPool.Cancellable;

 import java.util.AbstractMap;
 import java.util.HashSet;
 import java.util.Set;
-import java.util.concurrent.ScheduledFuture;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.function.BiFunction;
 import java.util.function.Consumer;

 import static org.hamcrest.CoreMatchers.allOf;
@@ -42,21 +41,25 @@ public class JvmGcMonitorServiceSettingsTests extends ESTestCase {

     public void testEmptySettingsAreOkay() throws InterruptedException {
         AtomicBoolean scheduled = new AtomicBoolean();
-        execute(Settings.EMPTY, (command, interval) -> { scheduled.set(true); return null; }, () -> assertTrue(scheduled.get()));
+        execute(Settings.EMPTY,
+            (command, interval, name) -> { scheduled.set(true); return new MockCancellable(); },
+            () -> assertTrue(scheduled.get()));
     }

     public void testDisabledSetting() throws InterruptedException {
         Settings settings = Settings.builder().put("monitor.jvm.gc.enabled", "false").build();
         AtomicBoolean scheduled = new AtomicBoolean();
-        execute(settings, (command, interval) -> { scheduled.set(true); return null; }, () -> assertFalse(scheduled.get()));
+        execute(settings,
+            (command, interval, name) -> { scheduled.set(true); return new MockCancellable(); },
+            () -> assertFalse(scheduled.get()));
     }

     public void testNegativeSetting() throws InterruptedException {
         String collector = randomAsciiOfLength(5);
         Settings settings = Settings.builder().put("monitor.jvm.gc.collector." + collector + ".warn", "-" + randomTimeValue()).build();
-        execute(settings, (command, interval) -> null, t -> {
-            assertThat(t, instanceOf(IllegalArgumentException.class));
-            assertThat(t.getMessage(), allOf(containsString("invalid gc_threshold"), containsString("for [monitor.jvm.gc.collector." + collector + ".")));
+        execute(settings, (command, interval, name) -> null, e -> {
+            assertThat(e, instanceOf(IllegalArgumentException.class));
+            assertThat(e.getMessage(), allOf(containsString("invalid gc_threshold"), containsString("for [monitor.jvm.gc.collector." + collector + ".")));
         }, true, null);
     }

@@ -74,9 +77,9 @@ public class JvmGcMonitorServiceSettingsTests extends ESTestCase {
         }

         // we should get an exception that a setting is missing
-        execute(builder.build(), (command, interval) -> null, t -> {
-            assertThat(t, instanceOf(IllegalArgumentException.class));
-            assertThat(t.getMessage(), containsString("missing gc_threshold for [monitor.jvm.gc.collector." + collector + "."));
+        execute(builder.build(), (command, interval, name) -> null, e -> {
+            assertThat(e, instanceOf(IllegalArgumentException.class));
+            assertThat(e.getMessage(), containsString("missing gc_threshold for [monitor.jvm.gc.collector." + collector + "."));
         }, true, null);
     }
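The scheduling hook in these tests grows from a `BiFunction<Runnable, TimeValue, ScheduledFuture<?>>` to a three-argument function that also receives the executor name and returns a `Cancellable` instead of a future; since the JDK's functional interfaces stop at `BiFunction`, the test supplies its own. A hedged sketch of that shape (this mirrors the idea seen in the lambdas above; the exact declaration lives further down the file, outside this excerpt):

    // Hypothetical stand-in mirroring the test's scheduler hook, not the file's exact declaration.
    @FunctionalInterface
    interface TriFunction<S, T, U, R> {
        R apply(S s, T t, U u);
    }

    // Usage as in the test lambdas:
    // TriFunction<Runnable, TimeValue, String, Cancellable> scheduler =
    //     (command, interval, name) -> { scheduled.set(true); return new MockCancellable(); };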
+ threshold, randomIntBetween(Integer.MIN_VALUE, -1)); - execute(builder.build(), (command, interval) -> null, t -> { - assertThat(t, instanceOf(IllegalArgumentException.class)); - assertThat(t.getMessage(), containsString("setting [monitor.jvm.gc.overhead." + threshold + "] must be >= 0")); + execute(builder.build(), (command, interval, name) -> null, e -> { + assertThat(e, instanceOf(IllegalArgumentException.class)); + assertThat(e.getMessage(), containsString("setting [monitor.jvm.gc.overhead." + threshold + "] must be >= 0")); }, true, null); } for (final String threshold : new String[] { "warn", "info", "debug" }) { final Settings.Builder builder = Settings.builder(); builder.put("monitor.jvm.gc.overhead." + threshold, randomIntBetween(100 + 1, Integer.MAX_VALUE)); - execute(builder.build(), (command, interval) -> null, t -> { - assertThat(t, instanceOf(IllegalArgumentException.class)); - assertThat(t.getMessage(), containsString("setting [monitor.jvm.gc.overhead." + threshold + "] must be <= 100")); + execute(builder.build(), (command, interval, name) -> null, e -> { + assertThat(e, instanceOf(IllegalArgumentException.class)); + assertThat(e.getMessage(), containsString("setting [monitor.jvm.gc.overhead." + threshold + "] must be <= 100")); }, true, null); } @@ -104,9 +107,9 @@ public class JvmGcMonitorServiceSettingsTests extends ESTestCase { infoWarnOutOfOrderBuilder.put("monitor.jvm.gc.overhead.info", info); final int warn = randomIntBetween(1, info - 1); infoWarnOutOfOrderBuilder.put("monitor.jvm.gc.overhead.warn", warn); - execute(infoWarnOutOfOrderBuilder.build(), (command, interval) -> null, t -> { - assertThat(t, instanceOf(IllegalArgumentException.class)); - assertThat(t.getMessage(), containsString("[monitor.jvm.gc.overhead.warn] must be greater than [monitor.jvm.gc.overhead.info] [" + info + "] but was [" + warn + "]")); + execute(infoWarnOutOfOrderBuilder.build(), (command, interval, name) -> null, e -> { + assertThat(e, instanceOf(IllegalArgumentException.class)); + assertThat(e.getMessage(), containsString("[monitor.jvm.gc.overhead.warn] must be greater than [monitor.jvm.gc.overhead.info] [" + info + "] but was [" + warn + "]")); }, true, null); final Settings.Builder debugInfoOutOfOrderBuilder = Settings.builder(); @@ -114,25 +117,25 @@ public class JvmGcMonitorServiceSettingsTests extends ESTestCase { final int debug = randomIntBetween(info + 1, 99); debugInfoOutOfOrderBuilder.put("monitor.jvm.gc.overhead.debug", debug); debugInfoOutOfOrderBuilder.put("monitor.jvm.gc.overhead.warn", randomIntBetween(debug + 1, 100)); // or the test will fail for the wrong reason - execute(debugInfoOutOfOrderBuilder.build(), (command, interval) -> null, t -> { - assertThat(t, instanceOf(IllegalArgumentException.class)); - assertThat(t.getMessage(), containsString("[monitor.jvm.gc.overhead.info] must be greater than [monitor.jvm.gc.overhead.debug] [" + debug + "] but was [" + info + "]")); + execute(debugInfoOutOfOrderBuilder.build(), (command, interval, name) -> null, e -> { + assertThat(e, instanceOf(IllegalArgumentException.class)); + assertThat(e.getMessage(), containsString("[monitor.jvm.gc.overhead.info] must be greater than [monitor.jvm.gc.overhead.debug] [" + debug + "] but was [" + info + "]")); }, true, null); } - private static void execute(Settings settings, BiFunction<Runnable, TimeValue, ScheduledFuture<?>> scheduler, Runnable asserts) throws InterruptedException { + private static void execute(Settings settings, TriFunction<Runnable, TimeValue, String, Cancellable> scheduler, Runnable asserts) throws InterruptedException { execute(settings, scheduler, 
null, false, asserts); } - private static void execute(Settings settings, BiFunction<Runnable, TimeValue, ScheduledFuture<?>> scheduler, Consumer<Throwable> consumer, boolean constructionShouldFail, Runnable asserts) throws InterruptedException { + private static void execute(Settings settings, TriFunction<Runnable, TimeValue, String, Cancellable> scheduler, Consumer<Exception> consumer, boolean constructionShouldFail, Runnable asserts) throws InterruptedException { assert constructionShouldFail == (consumer != null); assert constructionShouldFail == (asserts == null); ThreadPool threadPool = null; try { threadPool = new TestThreadPool(JvmGcMonitorServiceSettingsTests.class.getCanonicalName()) { @Override - public ScheduledFuture<?> scheduleWithFixedDelay(Runnable command, TimeValue interval) { - return scheduler.apply(command, interval); + public Cancellable scheduleWithFixedDelay(Runnable command, TimeValue interval, String name) { + return scheduler.apply(command, interval, name); } }; try { @@ -143,7 +146,7 @@ public class JvmGcMonitorServiceSettingsTests extends ESTestCase { service.doStart(); asserts.run(); service.doStop(); - } catch (Throwable t) { + } catch (Exception t) { consumer.accept(t); } } finally { @@ -151,4 +154,19 @@ public class JvmGcMonitorServiceSettingsTests extends ESTestCase { } } + interface TriFunction<S, T, U, R> { + R apply(S s, T t, U u); + } + + private static class MockCancellable implements Cancellable { + + @Override + public void cancel() { + } + + @Override + public boolean isCancelled() { + return false; + } + } } diff --git a/core/src/test/java/org/elasticsearch/monitor/jvm/JvmMonitorTests.java b/core/src/test/java/org/elasticsearch/monitor/jvm/JvmMonitorTests.java index 91862e9cd18..278a47ed21f 100644 --- a/core/src/test/java/org/elasticsearch/monitor/jvm/JvmMonitorTests.java +++ b/core/src/test/java/org/elasticsearch/monitor/jvm/JvmMonitorTests.java @@ -48,10 +48,10 @@ public class JvmMonitorTests extends ESTestCase { AtomicBoolean invoked = new AtomicBoolean(); JvmGcMonitorService.JvmMonitor monitor = new JvmGcMonitorService.JvmMonitor(Collections.emptyMap(), IGNORE) { @Override - void onMonitorFailure(Throwable t) { + void onMonitorFailure(Exception e) { invoked.set(true); - assertThat(t, instanceOf(RuntimeException.class)); - assertThat(t, hasToString(containsString("simulated"))); + assertThat(e, instanceOf(RuntimeException.class)); + assertThat(e, hasToString(containsString("simulated"))); } @Override @@ -174,7 +174,7 @@ public class JvmMonitorTests extends ESTestCase { JvmGcMonitorService.JvmMonitor monitor = new JvmGcMonitorService.JvmMonitor(gcThresholds, IGNORE) { @Override - void onMonitorFailure(Throwable t) { + void onMonitorFailure(Exception e) { } @Override @@ -284,7 +284,7 @@ public class JvmMonitorTests extends ESTestCase { final JvmGcMonitorService.JvmMonitor monitor = new JvmGcMonitorService.JvmMonitor(Collections.emptyMap(), IGNORE) { @Override - void onMonitorFailure(Throwable t) { + void onMonitorFailure(Exception e) { } @Override @@ -358,7 +358,7 @@ public class JvmMonitorTests extends ESTestCase { final JvmGcMonitorService.JvmMonitor monitor = new JvmGcMonitorService.JvmMonitor(Collections.emptyMap(), gcOverheadThreshold) { @Override - void onMonitorFailure(final Throwable t) { + void onMonitorFailure(final Exception e) { } @Override diff --git a/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java b/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java index 0d07bcf0981..090517adfcd 100644 --- a/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java +++ 
b/core/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java @@ -28,7 +28,7 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.ToXContent; @@ -70,7 +70,7 @@ public class NodeInfoStreamingTests extends ESTestCase { out.setVersion(version); nodeInfo.writeTo(out); out.close(); - StreamInput in = StreamInput.wrap(out.bytes()); + StreamInput in = out.bytes().streamInput(); in.setVersion(version); NodeInfo readNodeInfo = NodeInfo.readNodeInfo(in); assertExpectedUnchanged(nodeInfo, readNodeInfo); @@ -81,11 +81,6 @@ public class NodeInfoStreamingTests extends ESTestCase { assertThat(nodeInfo.getBuild().toString(), equalTo(readNodeInfo.getBuild().toString())); assertThat(nodeInfo.getHostname(), equalTo(readNodeInfo.getHostname())); assertThat(nodeInfo.getVersion(), equalTo(readNodeInfo.getVersion())); - assertThat(nodeInfo.getServiceAttributes().size(), equalTo(readNodeInfo.getServiceAttributes().size())); - for (Map.Entry entry : nodeInfo.getServiceAttributes().entrySet()) { - assertNotNull(readNodeInfo.getServiceAttributes().get(entry.getKey())); - assertThat(readNodeInfo.getServiceAttributes().get(entry.getKey()), equalTo(entry.getValue())); - } compareJsonOutput(nodeInfo.getHttp(), readNodeInfo.getHttp()); compareJsonOutput(nodeInfo.getJvm(), readNodeInfo.getJvm()); compareJsonOutput(nodeInfo.getProcess(), readNodeInfo.getProcess()); @@ -122,7 +117,7 @@ public class NodeInfoStreamingTests extends ESTestCase { private NodeInfo createNodeInfo() { Build build = Build.CURRENT; - DiscoveryNode node = new DiscoveryNode("test_node", DummyTransportAddress.INSTANCE, + DiscoveryNode node = new DiscoveryNode("test_node", LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), VersionUtils.randomVersion(random())); Map serviceAttributes = new HashMap<>(); serviceAttributes.put("test", "attribute"); @@ -134,7 +129,7 @@ public class NodeInfoStreamingTests extends ESTestCase { threadPoolInfos.add(new ThreadPool.Info("test_threadpool", ThreadPool.ThreadPoolType.FIXED, 5)); ThreadPoolInfo threadPoolInfo = new ThreadPoolInfo(threadPoolInfos); Map profileAddresses = new HashMap<>(); - BoundTransportAddress dummyBoundTransportAddress = new BoundTransportAddress(new TransportAddress[]{DummyTransportAddress.INSTANCE}, DummyTransportAddress.INSTANCE); + BoundTransportAddress dummyBoundTransportAddress = new BoundTransportAddress(new TransportAddress[]{LocalTransportAddress.buildUnique()}, LocalTransportAddress.buildUnique()); profileAddresses.put("test_address", dummyBoundTransportAddress); TransportInfo transport = new TransportInfo(dummyBoundTransportAddress, profileAddresses); HttpInfo htttpInfo = new HttpInfo(dummyBoundTransportAddress, randomLong()); @@ -149,6 +144,7 @@ public class NodeInfoStreamingTests extends ESTestCase { // pick a random long that sometimes exceeds an int: indexingBuffer = new ByteSizeValue(random().nextLong() & ((1L<<40)-1)); } - return new NodeInfo(VersionUtils.randomVersion(random()), build, node, serviceAttributes, settings, osInfo, process, jvm, threadPoolInfo, transport, htttpInfo, plugins, ingestInfo, 
indexingBuffer); + return new NodeInfo(VersionUtils.randomVersion(random()), build, node, settings, osInfo, process, jvm, + threadPoolInfo, transport, htttpInfo, plugins, ingestInfo, indexingBuffer); } } diff --git a/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java b/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java index 0cc30f8d569..0916cad60d5 100644 --- a/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java +++ b/core/src/test/java/org/elasticsearch/nodesinfo/SimpleNodesInfoIT.java @@ -24,9 +24,9 @@ import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; -import org.elasticsearch.test.ESIntegTestCase; import java.util.List; diff --git a/core/src/test/java/org/elasticsearch/operateAllIndices/DestructiveOperationsIntegrationIT.java b/core/src/test/java/org/elasticsearch/operateAllIndices/DestructiveOperationsIntegrationIT.java index 514b1757e41..83f600a8c2f 100644 --- a/core/src/test/java/org/elasticsearch/operateAllIndices/DestructiveOperationsIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/operateAllIndices/DestructiveOperationsIntegrationIT.java @@ -74,7 +74,6 @@ public class DestructiveOperationsIntegrationIT extends ESIntegTestCase { assertAcked(client().admin().indices().prepareCreate("index1").get()); assertAcked(client().admin().indices().prepareCreate("1index").get()); - ensureYellow();// wait for primaries to be allocated // Should succeed, since no wildcards assertAcked(client().admin().indices().prepareClose("1index").get()); diff --git a/core/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java b/core/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java index 8c6b71c9eac..26872ca11b5 100644 --- a/core/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java +++ b/core/src/test/java/org/elasticsearch/recovery/FullRollingRestartIT.java @@ -121,7 +121,7 @@ public class FullRollingRestartIT extends ESIntegTestCase { setMinimumMasterNodes(1); internalCluster().stopRandomDataNode(); - // make sure the cluster state is green, and all has been recovered + // make sure the cluster state is yellow, and all has been recovered assertTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout(healthTimeout).setWaitForYellowStatus().setWaitForRelocatingShards(0).setWaitForNodes("1")); logger.info("--> one node left, verifying data"); diff --git a/core/src/test/java/org/elasticsearch/recovery/RecoveriesCollectionTests.java b/core/src/test/java/org/elasticsearch/recovery/RecoveriesCollectionTests.java index 2564b31488b..582be02d457 100644 --- a/core/src/test/java/org/elasticsearch/recovery/RecoveriesCollectionTests.java +++ b/core/src/test/java/org/elasticsearch/recovery/RecoveriesCollectionTests.java @@ -18,36 +18,26 @@ */ package org.elasticsearch.recovery; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; import 
org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.index.replication.ESIndexLevelReplicationTestCase; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.recovery.RecoveriesCollection; import org.elasticsearch.indices.recovery.RecoveryFailedException; import org.elasticsearch.indices.recovery.RecoveryState; -import org.elasticsearch.indices.recovery.RecoveryTarget; import org.elasticsearch.indices.recovery.RecoveryTargetService; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.threadpool.ThreadPool; -import java.util.ArrayList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.function.Predicate; -import static java.util.Collections.emptyMap; -import static java.util.Collections.emptySet; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.lessThan; -public class RecoveriesCollectionTests extends ESSingleNodeTestCase { - final static RecoveryTargetService.RecoveryListener listener = new RecoveryTargetService.RecoveryListener() { +public class RecoveriesCollectionTests extends ESIndexLevelReplicationTestCase { + static final RecoveryTargetService.RecoveryListener listener = new RecoveryTargetService.RecoveryListener() { @Override public void onRecoveryDone(RecoveryState state) { @@ -60,82 +50,115 @@ public class RecoveriesCollectionTests extends ESSingleNodeTestCase { }; public void testLastAccessTimeUpdate() throws Exception { - createIndex(); - final RecoveriesCollection collection = new RecoveriesCollection(logger, getInstanceFromNode(ThreadPool.class)); - final long recoveryId = startRecovery(collection); - try (RecoveriesCollection.RecoveryRef status = collection.getRecovery(recoveryId)) { - final long lastSeenTime = status.status().lastAccessTime(); - assertBusy(new Runnable() { - @Override - public void run() { + try (ReplicationGroup shards = createGroup(0)) { + final RecoveriesCollection collection = new RecoveriesCollection(logger, threadPool, v -> {}); + final long recoveryId = startRecovery(collection, shards.getPrimaryNode(), shards.addReplica()); + try (RecoveriesCollection.RecoveryRef status = collection.getRecovery(recoveryId)) { + final long lastSeenTime = status.status().lastAccessTime(); + assertBusy(() -> { try (RecoveriesCollection.RecoveryRef currentStatus = collection.getRecovery(recoveryId)) { assertThat("access time failed to update", lastSeenTime, lessThan(currentStatus.status().lastAccessTime())); } - } - }); - } finally { - collection.cancelRecovery(recoveryId, "life"); + }); + } finally { + collection.cancelRecovery(recoveryId, "life"); + } } } - public void testRecoveryTimeout() throws InterruptedException { - createIndex(); - final RecoveriesCollection collection = new RecoveriesCollection(logger, getInstanceFromNode(ThreadPool.class)); - final AtomicBoolean failed = new AtomicBoolean(); - final CountDownLatch latch = new CountDownLatch(1); - final long recoveryId = startRecovery(collection, new RecoveryTargetService.RecoveryListener() { - @Override - public void onRecoveryDone(RecoveryState state) { - latch.countDown(); - } + public void testRecoveryTimeout() throws Exception { + try (ReplicationGroup shards = createGroup(0)) { + final RecoveriesCollection collection = new RecoveriesCollection(logger, threadPool, v -> {}); + final AtomicBoolean failed = new 
AtomicBoolean(); + final CountDownLatch latch = new CountDownLatch(1); + final long recoveryId = startRecovery(collection, shards.getPrimaryNode(), shards.addReplica(), + new RecoveryTargetService.RecoveryListener() { + @Override + public void onRecoveryDone(RecoveryState state) { + latch.countDown(); + } - @Override - public void onRecoveryFailure(RecoveryState state, RecoveryFailedException e, boolean sendShardFailure) { - failed.set(true); - latch.countDown(); + @Override + public void onRecoveryFailure(RecoveryState state, RecoveryFailedException e, boolean sendShardFailure) { + failed.set(true); + latch.countDown(); + } + }, TimeValue.timeValueMillis(100)); + try { + latch.await(30, TimeUnit.SECONDS); + assertTrue("recovery failed to timeout", failed.get()); + } finally { + collection.cancelRecovery(recoveryId, "meh"); } - }, TimeValue.timeValueMillis(100)); - try { - latch.await(30, TimeUnit.SECONDS); - assertTrue("recovery failed to timeout", failed.get()); - } finally { - collection.cancelRecovery(recoveryId, "meh"); } } public void testRecoveryCancellation() throws Exception { - createIndex(); - final RecoveriesCollection collection = new RecoveriesCollection(logger, getInstanceFromNode(ThreadPool.class)); - final long recoveryId = startRecovery(collection); - final long recoveryId2 = startRecovery(collection); - try (RecoveriesCollection.RecoveryRef recoveryRef = collection.getRecovery(recoveryId)) { - ShardId shardId = recoveryRef.status().shardId(); - assertTrue("failed to cancel recoveries", collection.cancelRecoveriesForShard(shardId, "test")); - assertThat("all recoveries should be cancelled", collection.size(), equalTo(0)); - } finally { - collection.cancelRecovery(recoveryId, "meh"); - collection.cancelRecovery(recoveryId2, "meh"); + try (ReplicationGroup shards = createGroup(0)) { + final RecoveriesCollection collection = new RecoveriesCollection(logger, threadPool, v -> {}); + final long recoveryId = startRecovery(collection, shards.getPrimaryNode(), shards.addReplica()); + final long recoveryId2 = startRecovery(collection, shards.getPrimaryNode(), shards.addReplica()); + try (RecoveriesCollection.RecoveryRef recoveryRef = collection.getRecovery(recoveryId)) { + ShardId shardId = recoveryRef.status().shardId(); + assertTrue("failed to cancel recoveries", collection.cancelRecoveriesForShard(shardId, "test")); + assertThat("all recoveries should be cancelled", collection.size(), equalTo(0)); + } finally { + collection.cancelRecovery(recoveryId, "meh"); + collection.cancelRecovery(recoveryId2, "meh"); + } } } - protected void createIndex() { - createIndex("test", - Settings.builder() - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1, IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) - .build()); - ensureGreen(); + public void testResetRecovery() throws Exception { + try (ReplicationGroup shards = createGroup(0)) { + shards.startAll(); + int numDocs = randomIntBetween(1, 15); + shards.indexDocs(numDocs); + final RecoveriesCollection collection = new RecoveriesCollection(logger, threadPool, v -> {}); + IndexShard shard = shards.addReplica(); + final long recoveryId = startRecovery(collection, shards.getPrimaryNode(), shard); + try (RecoveriesCollection.RecoveryRef recovery = collection.getRecovery(recoveryId)) { + final int currentAsTarget = shard.recoveryStats().currentAsTarget(); + final int referencesToStore = recovery.status().store().refCount(); + String tempFileName = recovery.status().getTempNameForFile("foobar"); + collection.resetRecovery(recoveryId, 
recovery.status().shardId()); + try (RecoveriesCollection.RecoveryRef resetRecovery = collection.getRecovery(recoveryId)) { + assertNotSame(recovery.status(), resetRecovery); + assertSame(recovery.status().CancellableThreads(), resetRecovery.status().CancellableThreads()); + assertSame(recovery.status().indexShard(), resetRecovery.status().indexShard()); + assertSame(recovery.status().store(), resetRecovery.status().store()); + assertEquals(referencesToStore + 1, resetRecovery.status().store().refCount()); + assertEquals(currentAsTarget+1, shard.recoveryStats().currentAsTarget()); // we blink for a short moment... + recovery.close(); + expectThrows(ElasticsearchException.class, () -> recovery.status().store()); + assertEquals(referencesToStore, resetRecovery.status().store().refCount()); + String resetTempFileName = resetRecovery.status().getTempNameForFile("foobar"); + assertNotEquals(tempFileName, resetTempFileName); + } + assertEquals(currentAsTarget, shard.recoveryStats().currentAsTarget()); + } + try (RecoveriesCollection.RecoveryRef resetRecovery = collection.getRecovery(recoveryId)) { + shards.recoverReplica(shard, (s, n) -> { + assertSame(s, resetRecovery.status().indexShard()); + return resetRecovery.status(); + }, false); + } + shards.assertAllEqual(numDocs); + assertNull("recovery is done", collection.getRecovery(recoveryId)); + } } - - long startRecovery(RecoveriesCollection collection) { - return startRecovery(collection, listener, TimeValue.timeValueMinutes(60)); + long startRecovery(RecoveriesCollection collection, DiscoveryNode sourceNode, IndexShard shard) { + return startRecovery(collection,sourceNode, shard, listener, TimeValue.timeValueMinutes(60)); } - long startRecovery(RecoveriesCollection collection, RecoveryTargetService.RecoveryListener listener, TimeValue timeValue) { - IndicesService indexServices = getInstanceFromNode(IndicesService.class); - IndexShard indexShard = indexServices.indexServiceSafe(resolveIndex("test")).getShardOrNull(0); - final DiscoveryNode sourceNode = new DiscoveryNode("id", DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT); + long startRecovery(RecoveriesCollection collection, DiscoveryNode sourceNode, IndexShard indexShard, + RecoveryTargetService.RecoveryListener listener, TimeValue timeValue) { + final DiscoveryNode rNode = getDiscoveryNode(indexShard.routingEntry().currentNodeId()); + indexShard.markAsRecovering("remote", new RecoveryState(indexShard.shardId(), false, RecoveryState.Type.REPLICA, sourceNode, + rNode)); + indexShard.prepareForIndexRecovery(); return collection.startRecovery(indexShard, sourceNode, listener, timeValue); } } diff --git a/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java b/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java index 620dfeb94c2..c95f57d20dc 100644 --- a/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java +++ b/core/src/test/java/org/elasticsearch/recovery/RelocationIT.java @@ -22,6 +22,7 @@ package org.elasticsearch.recovery; import com.carrotsearch.hppc.IntHashSet; import com.carrotsearch.hppc.procedures.IntProcedure; import org.apache.lucene.index.IndexFileNames; +import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.English; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.index.IndexRequestBuilder; @@ -93,8 +94,6 @@ import static org.hamcrest.Matchers.startsWith; public class RelocationIT extends ESIntegTestCase { private final TimeValue 
ACCEPTABLE_RELOCATION_TIME = new TimeValue(5, TimeUnit.MINUTES); - - @Override protected Collection> nodePlugins() { return pluginList(MockTransportService.TestPlugin.class, MockIndexEventListener.TestPlugin.class); @@ -219,7 +218,7 @@ public class RelocationIT extends ESIntegTestCase { for (int i = 0; i < 10; i++) { try { logger.info("--> START search test round {}", i + 1); - SearchHits hits = client().prepareSearch("test").setQuery(matchAllQuery()).setSize((int) indexer.totalIndexedDocs()).setNoFields().execute().actionGet().getHits(); + SearchHits hits = client().prepareSearch("test").setQuery(matchAllQuery()).setSize((int) indexer.totalIndexedDocs()).setNoStoredFields().execute().actionGet().getHits(); ranOnce = true; if (hits.totalHits() != indexer.totalIndexedDocs()) { int[] hitIds = new int[(int) indexer.totalIndexedDocs()]; @@ -274,9 +273,6 @@ public class RelocationIT extends ESIntegTestCase { .put("index.refresh_interval", -1) // we want to control refreshes c ).execute().actionGet(); - // make sure the first shard is started. - ensureYellow(); - for (int i = 1; i < numberOfNodes; i++) { logger.info("--> starting [node_{}] ...", i); nodes[i] = internalCluster().startNode(); @@ -433,23 +429,23 @@ public class RelocationIT extends ESIntegTestCase { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/18553") public void testIndexAndRelocateConcurrently() throws ExecutionException, InterruptedException { + int halfNodes = randomIntBetween(1, 3); Settings blueSetting = Settings.builder().put("node.attr.color", "blue").build(); - InternalTestCluster.Async> blueFuture = internalCluster().startNodesAsync(blueSetting, blueSetting); + InternalTestCluster.Async> blueFuture = internalCluster().startNodesAsync(halfNodes, blueSetting); Settings redSetting = Settings.builder().put("node.attr.color", "red").build(); - InternalTestCluster.Async> redFuture = internalCluster().startNodesAsync(redSetting, redSetting); + InternalTestCluster.Async> redFuture = internalCluster().startNodesAsync(halfNodes, redSetting); blueFuture.get(); redFuture.get(); logger.info("blue nodes: {}", blueFuture.get()); logger.info("red nodes: {}", redFuture.get()); - ensureStableCluster(4); + ensureStableCluster(halfNodes * 2); assertAcked(prepareCreate("test").setSettings(Settings.builder() .put("index.routing.allocation.exclude.color", "blue") - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) - .put(indexSettings()))); - ensureYellow(); + .put(indexSettings()) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, randomInt(halfNodes - 1)) + )); assertAllShardsOnNodes("test", redFuture.get().toArray(new String[2])); int numDocs = randomIntBetween(100, 150); ArrayList ids = new ArrayList<>(); @@ -480,9 +476,11 @@ public class RelocationIT extends ESIntegTestCase { numDocs *= 2; logger.info(" --> waiting for relocation to complete"); - ensureGreen("test");// move all shards to the new node (it waits on relocation) + ensureGreen("test"); // move all shards to the new nodes (it waits on relocation) + final int numIters = randomIntBetween(10, 20); for (int i = 0; i < numIters; i++) { + logger.info(" --> checking iteration {}", i); SearchResponse afterRelocation = client().prepareSearch().setSize(ids.size()).get(); assertNoFailures(afterRelocation); assertSearchHits(afterRelocation, ids.toArray(new String[ids.size()])); @@ -505,7 +503,8 @@ public class RelocationIT extends ESIntegTestCase { if (chunkRequest.name().startsWith(IndexFileNames.SEGMENTS)) { // corrupting the segments_N files in order to make 
sure future recovery re-send files logger.debug("corrupting [{}] to {}. file name: [{}]", action, node, chunkRequest.name()); - byte[] array = chunkRequest.content().array(); + assert chunkRequest.content().toBytesRef().bytes == chunkRequest.content().toBytesRef().bytes : "no internal reference!!"; + byte[] array = chunkRequest.content().toBytesRef().bytes; array[0] = (byte) ~array[0]; // flip one byte in the content corruptionCount.countDown(); } diff --git a/core/src/test/java/org/elasticsearch/recovery/SimpleRecoveryIT.java b/core/src/test/java/org/elasticsearch/recovery/SimpleRecoveryIT.java index 6ccdae5e5ec..e860cfc9ebd 100644 --- a/core/src/test/java/org/elasticsearch/recovery/SimpleRecoveryIT.java +++ b/core/src/test/java/org/elasticsearch/recovery/SimpleRecoveryIT.java @@ -52,9 +52,6 @@ public class SimpleRecoveryIT extends ESIntegTestCase { NumShards numShards = getNumShards("test"); - logger.info("Running Cluster Health"); - ensureYellow(); - client().index(indexRequest("test").type("type1").id("1").source(source("1", "test"))).actionGet(); FlushResponse flushResponse = client().admin().indices().flush(flushRequest("test")).actionGet(); assertThat(flushResponse.getTotalShards(), equalTo(numShards.totalNumShards)); diff --git a/core/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java b/core/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java index 3d46c0bbacf..c9d8ff81aa2 100644 --- a/core/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java +++ b/core/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.client.Client; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -46,7 +45,6 @@ import java.util.List; import java.util.stream.Collectors; import static org.elasticsearch.repositories.blobstore.BlobStoreRepository.blobId; -import static org.elasticsearch.repositories.blobstore.BlobStoreRepository.parseNameUUIDFromBlobName; import static org.hamcrest.Matchers.equalTo; /** @@ -102,13 +100,96 @@ public class BlobStoreRepositoryTests extends ESSingleNodeTestCase { (BlobStoreRepository) repositoriesService.repository(repositoryName); final List<SnapshotId> originalSnapshots = Arrays.asList(snapshotId1, snapshotId2); - List<SnapshotId> snapshotIds = repository.snapshots().stream() + List<SnapshotId> snapshotIds = repository.getSnapshots().stream() .sorted((s1, s2) -> s1.getName().compareTo(s2.getName())) .collect(Collectors.toList()); assertThat(snapshotIds, equalTo(originalSnapshots)); } - public void testSnapshotIndexFile() throws Exception { + public void testReadAndWriteSnapshotsThroughIndexFile() throws Exception { + final BlobStoreRepository repository = setupRepo(); + + // write to and read from a snapshot file with no entries + assertThat(repository.getSnapshots().size(), equalTo(0)); + repository.writeSnapshotsToIndexGen(Collections.emptyList()); + assertThat(repository.getSnapshots().size(), equalTo(0)); + + // write to and read from a snapshot file with a random number of entries + final int numSnapshots = randomIntBetween(1, 1000); + final List<SnapshotId> snapshotIds = new 
ArrayList<>(numSnapshots); + for (int i = 0; i < numSnapshots; i++) { + snapshotIds.add(new SnapshotId(randomAsciiOfLength(8), UUIDs.randomBase64UUID())); + } + repository.writeSnapshotsToIndexGen(snapshotIds); + assertThat(repository.getSnapshots(), equalTo(snapshotIds)); + } + + public void testIndexGenerationalFiles() throws Exception { + final BlobStoreRepository repository = setupRepo(); + + // write to index generational file + final int numSnapshots = randomIntBetween(1, 1000); + final List<SnapshotId> snapshotIds = new ArrayList<>(numSnapshots); + for (int i = 0; i < numSnapshots; i++) { + snapshotIds.add(new SnapshotId(randomAsciiOfLength(8), UUIDs.randomBase64UUID())); + } + repository.writeSnapshotsToIndexGen(snapshotIds); + assertThat(Sets.newHashSet(repository.readSnapshotsFromIndex()), equalTo(Sets.newHashSet(snapshotIds))); + assertThat(repository.latestIndexBlobId(), equalTo(0L)); + assertThat(repository.readSnapshotIndexLatestBlob(), equalTo(0L)); + + // adding more and writing to a new index generational file + for (int i = 0; i < 10; i++) { + snapshotIds.add(new SnapshotId(randomAsciiOfLength(8), UUIDs.randomBase64UUID())); + } + repository.writeSnapshotsToIndexGen(snapshotIds); + assertThat(Sets.newHashSet(repository.readSnapshotsFromIndex()), equalTo(Sets.newHashSet(snapshotIds))); + assertThat(repository.latestIndexBlobId(), equalTo(1L)); + assertThat(repository.readSnapshotIndexLatestBlob(), equalTo(1L)); + + // removing a snapshot and writing to a new index generational file + snapshotIds.remove(0); + repository.writeSnapshotsToIndexGen(snapshotIds); + assertThat(Sets.newHashSet(repository.readSnapshotsFromIndex()), equalTo(Sets.newHashSet(snapshotIds))); + assertThat(repository.latestIndexBlobId(), equalTo(2L)); + assertThat(repository.readSnapshotIndexLatestBlob(), equalTo(2L)); + } + + public void testOldIndexFileFormat() throws Exception { + final BlobStoreRepository repository = setupRepo(); + + // write old index file format + final int numOldSnapshots = randomIntBetween(1, 50); + final List<SnapshotId> snapshotIds = new ArrayList<>(); + for (int i = 0; i < numOldSnapshots; i++) { + snapshotIds.add(new SnapshotId(randomAsciiOfLength(8), SnapshotId.UNASSIGNED_UUID)); + } + writeOldFormat(repository, snapshotIds.stream().map(SnapshotId::getName).collect(Collectors.toList())); + assertThat(Sets.newHashSet(repository.getSnapshots()), equalTo(Sets.newHashSet(snapshotIds))); + + // write to and read from a snapshot file with a random number of new entries added + final int numSnapshots = randomIntBetween(1, 1000); + for (int i = 0; i < numSnapshots; i++) { + snapshotIds.add(new SnapshotId(randomAsciiOfLength(8), UUIDs.randomBase64UUID())); + } + repository.writeSnapshotsToIndexGen(snapshotIds); + assertThat(Sets.newHashSet(repository.getSnapshots()), equalTo(Sets.newHashSet(snapshotIds))); + } + + public void testBlobId() { + SnapshotId snapshotId = new SnapshotId("abc123", SnapshotId.UNASSIGNED_UUID); + assertThat(blobId(snapshotId), equalTo("abc123")); // just the snapshot name + snapshotId = new SnapshotId("abc-123", SnapshotId.UNASSIGNED_UUID); + assertThat(blobId(snapshotId), equalTo("abc-123")); // just the snapshot name + String uuid = UUIDs.randomBase64UUID(); + snapshotId = new SnapshotId("abc123", uuid); + assertThat(blobId(snapshotId), equalTo("abc123-" + uuid)); // snapshot name + '-' + uuid + uuid = UUIDs.randomBase64UUID(); + snapshotId = new SnapshotId("abc-123", uuid); + assertThat(blobId(snapshotId), equalTo("abc-123-" + uuid)); // snapshot name + '-' + uuid + } + + 
private BlobStoreRepository setupRepo() { final Client client = client(); final Path location = ESIntegTestCase.randomRepoPath(node().settings()); final String repositoryName = "test-repo"; @@ -123,89 +204,7 @@ public class BlobStoreRepositoryTests extends ESSingleNodeTestCase { final RepositoriesService repositoriesService = getInstanceFromNode(RepositoriesService.class); @SuppressWarnings("unchecked") final BlobStoreRepository repository = (BlobStoreRepository) repositoriesService.repository(repositoryName); - - // write to and read from a snapshot file with no entries - repository.writeSnapshotList(Collections.emptyList()); - List readSnapshotIds = repository.readSnapshotList(); - assertThat(readSnapshotIds.size(), equalTo(0)); - - // write to and read from a snapshot file with a random number of entries - final int numSnapshots = randomIntBetween(1, 1000); - final List snapshotIds = new ArrayList<>(numSnapshots); - for (int i = 0; i < numSnapshots; i++) { - snapshotIds.add(new SnapshotId(randomAsciiOfLength(8), UUIDs.randomBase64UUID())); - } - repository.writeSnapshotList(snapshotIds); - readSnapshotIds = repository.readSnapshotList(); - assertThat(readSnapshotIds, equalTo(snapshotIds)); - } - - public void testOldIndexFileFormat() throws Exception { - final Client client = client(); - final Path location = ESIntegTestCase.randomRepoPath(node().settings()); - final String repositoryName = "test-repo"; - - PutRepositoryResponse putRepositoryResponse = - client.admin().cluster().preparePutRepository(repositoryName) - .setType("fs") - .setSettings(Settings.builder().put(node().settings()).put("location", location)) - .get(); - assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true)); - - final RepositoriesService repositoriesService = getInstanceFromNode(RepositoriesService.class); - @SuppressWarnings("unchecked") final BlobStoreRepository repository = - (BlobStoreRepository) repositoriesService.repository(repositoryName); - - // write old index file format - final int numOldSnapshots = randomIntBetween(1, 50); - final List snapshotIds = new ArrayList<>(); - for (int i = 0; i < numOldSnapshots; i++) { - snapshotIds.add(new SnapshotId(randomAsciiOfLength(8), SnapshotId.UNASSIGNED_UUID)); - } - writeOldFormat(repository, snapshotIds.stream().map(SnapshotId::getName).collect(Collectors.toList())); - List readSnapshotIds = repository.readSnapshotList(); - assertThat(Sets.newHashSet(readSnapshotIds), equalTo(Sets.newHashSet(snapshotIds))); - - // write to and read from a snapshot file with a random number of new entries added - final int numSnapshots = randomIntBetween(1, 1000); - for (int i = 0; i < numSnapshots; i++) { - snapshotIds.add(new SnapshotId(randomAsciiOfLength(8), UUIDs.randomBase64UUID())); - } - repository.writeSnapshotList(snapshotIds); - readSnapshotIds = repository.readSnapshotList(); - assertThat(Sets.newHashSet(readSnapshotIds), equalTo(Sets.newHashSet(snapshotIds))); - } - - public void testParseUUIDFromBlobName() { - String blobStr = "abc123"; - Tuple pair = parseNameUUIDFromBlobName(blobStr); - assertThat(pair.v1(), equalTo(blobStr)); // snapshot name - assertThat(pair.v2(), equalTo(SnapshotId.UNASSIGNED_UUID)); // snapshot uuid - blobStr = "abcefghijklmnopqrstuvwxyz"; - pair = parseNameUUIDFromBlobName(blobStr); - assertThat(pair.v1(), equalTo(blobStr)); - assertThat(pair.v2(), equalTo(SnapshotId.UNASSIGNED_UUID)); - blobStr = "abc123-xyz"; // not enough characters after '-' to have a uuid - pair = parseNameUUIDFromBlobName(blobStr); - assertThat(pair.v1(), 
equalTo(blobStr)); - assertThat(pair.v2(), equalTo(SnapshotId.UNASSIGNED_UUID)); - blobStr = "abc123-a1b2c3d4e5f6g7h8i9j0k1"; - pair = parseNameUUIDFromBlobName(blobStr); - assertThat(pair.v1(), equalTo("abc123")); - assertThat(pair.v2(), equalTo("a1b2c3d4e5f6g7h8i9j0k1")); - } - - public void testBlobId() { - SnapshotId snapshotId = new SnapshotId("abc123", SnapshotId.UNASSIGNED_UUID); - assertThat(blobId(snapshotId), equalTo("abc123")); // just the snapshot name - snapshotId = new SnapshotId("abc-123", SnapshotId.UNASSIGNED_UUID); - assertThat(blobId(snapshotId), equalTo("abc-123")); // just the snapshot name - String uuid = UUIDs.randomBase64UUID(); - snapshotId = new SnapshotId("abc123", uuid); - assertThat(blobId(snapshotId), equalTo("abc123-" + uuid)); // snapshot name + '-' + uuid - uuid = UUIDs.randomBase64UUID(); - snapshotId = new SnapshotId("abc-123", uuid); - assertThat(blobId(snapshotId), equalTo("abc-123-" + uuid)); // snapshot name + '-' + uuid + return repository; } private void writeOldFormat(final BlobStoreRepository repository, final List snapshotNames) throws Exception { diff --git a/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java b/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java index aa3b11e6250..051159b448b 100644 --- a/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java +++ b/core/src/test/java/org/elasticsearch/rest/BytesRestResponseTests.java @@ -36,17 +36,21 @@ import java.io.IOException; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -/** - * - */ public class BytesRestResponseTests extends ESTestCase { + class UnknownException extends Exception { + + public UnknownException(final String message, final Throwable cause) { + super(message, cause); + } + + } + public void testWithHeaders() throws Exception { RestRequest request = new FakeRestRequest(); RestChannel channel = randomBoolean() ? 
new DetailedExceptionRestChannel(request) : new SimpleExceptionRestChannel(request); @@ -62,9 +66,9 @@ public class BytesRestResponseTests extends ESTestCase { RestRequest request = new FakeRestRequest(); RestChannel channel = new SimpleExceptionRestChannel(request); - Throwable t = new ElasticsearchException("an error occurred reading data", new FileNotFoundException("/foo/bar")); + Exception t = new ElasticsearchException("an error occurred reading data", new FileNotFoundException("/foo/bar")); BytesRestResponse response = new BytesRestResponse(channel, t); - String text = response.content().toUtf8(); + String text = response.content().utf8ToString(); assertThat(text, containsString("ElasticsearchException[an error occurred reading data]")); assertThat(text, not(containsString("FileNotFoundException"))); assertThat(text, not(containsString("/foo/bar"))); @@ -75,9 +79,9 @@ public class BytesRestResponseTests extends ESTestCase { RestRequest request = new FakeRestRequest(); RestChannel channel = new DetailedExceptionRestChannel(request); - Throwable t = new ElasticsearchException("an error occurred reading data", new FileNotFoundException("/foo/bar")); + Exception t = new ElasticsearchException("an error occurred reading data", new FileNotFoundException("/foo/bar")); BytesRestResponse response = new BytesRestResponse(channel, t); - String text = response.content().toUtf8(); + String text = response.content().utf8ToString(); assertThat(text, containsString("{\"type\":\"exception\",\"reason\":\"an error occurred reading data\"}")); assertThat(text, containsString("{\"type\":\"file_not_found_exception\",\"reason\":\"/foo/bar\"}")); } @@ -86,10 +90,10 @@ public class BytesRestResponseTests extends ESTestCase { RestRequest request = new FakeRestRequest(); RestChannel channel = new SimpleExceptionRestChannel(request); - Throwable t = new Throwable("an error occurred reading data", new FileNotFoundException("/foo/bar")); + Exception t = new UnknownException("an error occurred reading data", new FileNotFoundException("/foo/bar")); BytesRestResponse response = new BytesRestResponse(channel, t); - String text = response.content().toUtf8(); - assertThat(text, not(containsString("Throwable[an error occurred reading data]"))); + String text = response.content().utf8ToString(); + assertThat(text, not(containsString("UnknownException[an error occurred reading data]"))); assertThat(text, not(containsString("FileNotFoundException[/foo/bar]"))); assertThat(text, not(containsString("error_trace"))); assertThat(text, containsString("\"error\":\"No ElasticsearchException found\"")); @@ -100,10 +104,10 @@ public class BytesRestResponseTests extends ESTestCase { request.params().put("error_trace", "true"); RestChannel channel = new DetailedExceptionRestChannel(request); - Throwable t = new Throwable("an error occurred reading data", new FileNotFoundException("/foo/bar")); + Exception t = new UnknownException("an error occurred reading data", new FileNotFoundException("/foo/bar")); BytesRestResponse response = new BytesRestResponse(channel, t); - String text = response.content().toUtf8(); - assertThat(text, containsString("\"type\":\"throwable\",\"reason\":\"an error occurred reading data\"")); + String text = response.content().utf8ToString(); + assertThat(text, containsString("\"type\":\"unknown_exception\",\"reason\":\"an error occurred reading data\"")); assertThat(text, containsString("{\"type\":\"file_not_found_exception\"")); assertThat(text, containsString("\"stack_trace\":\"[an error occurred reading 
data]")); } @@ -112,15 +116,15 @@ public class BytesRestResponseTests extends ESTestCase { RestRequest request = new FakeRestRequest(); RestChannel channel = new DetailedExceptionRestChannel(request); { - Throwable t = new ElasticsearchException("an error occurred reading data", new FileNotFoundException("/foo/bar")); - BytesRestResponse response = new BytesRestResponse(channel, t); - String text = response.content().toUtf8(); + Exception e = new ElasticsearchException("an error occurred reading data", new FileNotFoundException("/foo/bar")); + BytesRestResponse response = new BytesRestResponse(channel, e); + String text = response.content().utf8ToString(); assertThat(text, containsString("{\"root_cause\":[{\"type\":\"exception\",\"reason\":\"an error occurred reading data\"}]")); } { - Throwable t = new FileNotFoundException("/foo/bar"); - BytesRestResponse response = new BytesRestResponse(channel, t); - String text = response.content().toUtf8(); + Exception e = new FileNotFoundException("/foo/bar"); + BytesRestResponse response = new BytesRestResponse(channel, e); + String text = response.content().utf8ToString(); assertThat(text, containsString("{\"root_cause\":[{\"type\":\"file_not_found_exception\",\"reason\":\"/foo/bar\"}]")); } } @@ -130,7 +134,7 @@ public class BytesRestResponseTests extends ESTestCase { RestChannel channel = new SimpleExceptionRestChannel(request); BytesRestResponse response = new BytesRestResponse(channel, null); - String text = response.content().toUtf8(); + String text = response.content().utf8ToString(); assertThat(text, containsString("\"error\":\"unknown\"")); assertThat(text, not(containsString("error_trace"))); } @@ -144,7 +148,7 @@ public class BytesRestResponseTests extends ESTestCase { new SearchShardTarget("node_1", new Index("foo", "_na_"), 2)); SearchPhaseExecutionException ex = new SearchPhaseExecutionException("search", "all shards failed", new ShardSearchFailure[] {failure, failure1}); BytesRestResponse response = new BytesRestResponse(channel, new RemoteTransportException("foo", ex)); - String text = response.content().toUtf8(); + String text = response.content().utf8ToString(); String expected = "{\"error\":{\"root_cause\":[{\"type\":\"parsing_exception\",\"reason\":\"foobar\",\"line\":1,\"col\":2}],\"type\":\"search_phase_execution_exception\",\"reason\":\"all shards failed\",\"phase\":\"search\",\"grouped\":true,\"failed_shards\":[{\"shard\":1,\"index\":\"foo\",\"node\":\"node_1\",\"reason\":{\"type\":\"parsing_exception\",\"reason\":\"foobar\",\"line\":1,\"col\":2}}]},\"status\":400}"; assertEquals(expected.trim(), text.trim()); String stackTrace = ExceptionsHelper.stackTrace(ex); @@ -160,7 +164,7 @@ public class BytesRestResponseTests extends ESTestCase { // if we try to decode the path, this will throw an IllegalArgumentException again final BytesRestResponse response = new BytesRestResponse(channel, e); assertNotNull(response.content()); - final String content = response.content().toUtf8(); + final String content = response.content().utf8ToString(); assertThat(content, containsString("\"type\":\"illegal_argument_exception\"")); assertThat(content, containsString("\"reason\":\"partial escape sequence at end of string: %a\"")); assertThat(content, containsString("\"status\":" + 400)); @@ -171,7 +175,7 @@ public class BytesRestResponseTests extends ESTestCase { final RestChannel channel = new DetailedExceptionRestChannel(request); final BytesRestResponse response = new BytesRestResponse(channel, new ElasticsearchException("simulated")); 
assertNotNull(response.content()); - final String content = response.content().toUtf8(); + final String content = response.content().utf8ToString(); assertThat(content, containsString("\"type\":\"exception\"")); assertThat(content, containsString("\"reason\":\"simulated\"")); assertThat(content, containsString("\"status\":" + 500)); diff --git a/core/src/test/java/org/elasticsearch/rest/DeprecationRestHandlerTests.java b/core/src/test/java/org/elasticsearch/rest/DeprecationRestHandlerTests.java new file mode 100644 index 00000000000..be0f3b15115 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/rest/DeprecationRestHandlerTests.java @@ -0,0 +1,139 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.rest; + +import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator; + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.test.ESTestCase; + +import org.mockito.InOrder; + +import static org.mockito.Mockito.inOrder; +import static org.mockito.Mockito.mock; + +/** + * Tests {@link DeprecationRestHandler}. + */ +public class DeprecationRestHandlerTests extends ESTestCase { + + private final RestHandler handler = mock(RestHandler.class); + /** + * Note: Headers should only use US ASCII (and this inevitably becomes one!). 
+ */ + private final String deprecationMessage = randomAsciiOfLengthBetween(1, 30); + private final DeprecationLogger deprecationLogger = mock(DeprecationLogger.class); + + public void testNullHandler() { + expectThrows(NullPointerException.class, () -> new DeprecationRestHandler(null, deprecationMessage, deprecationLogger)); + } + + public void testInvalidDeprecationMessageThrowsException() { + String invalidDeprecationMessage = randomFrom("", null, " "); + + expectThrows(IllegalArgumentException.class, + () -> new DeprecationRestHandler(handler, invalidDeprecationMessage, deprecationLogger)); + } + + public void testNullDeprecationLogger() { + expectThrows(NullPointerException.class, () -> new DeprecationRestHandler(handler, deprecationMessage, null)); + } + + public void testHandleRequestLogsWarningThenForwards() throws Exception { + RestRequest request = mock(RestRequest.class); + RestChannel channel = mock(RestChannel.class); + NodeClient client = mock(NodeClient.class); + + DeprecationRestHandler deprecatedHandler = new DeprecationRestHandler(handler, deprecationMessage, deprecationLogger); + + // test it + deprecatedHandler.handleRequest(request, channel, client); + + InOrder inOrder = inOrder(handler, request, channel, deprecationLogger); + + // log, then forward + inOrder.verify(deprecationLogger).deprecated(deprecationMessage); + inOrder.verify(handler).handleRequest(request, channel, client); + inOrder.verifyNoMoreInteractions(); + } + + public void testValidHeaderValue() { + ASCIIHeaderGenerator generator = new ASCIIHeaderGenerator(); + String value = generator.ofCodeUnitsLength(random(), 1, 50); + + assertTrue(DeprecationRestHandler.validHeaderValue(value)); + assertSame(value, DeprecationRestHandler.requireValidHeader(value)); + } + + public void testInvalidHeaderValue() { + ASCIIHeaderGenerator generator = new ASCIIHeaderGenerator(); + String value = generator.ofCodeUnitsLength(random(), 0, 25) + + randomFrom('\t', '\0', '\n', (char)27 /* ESC */, (char)31 /* unit separator*/, (char)127 /* DEL */) + + generator.ofCodeUnitsLength(random(), 0, 25); + + assertFalse(DeprecationRestHandler.validHeaderValue(value)); + + expectThrows(IllegalArgumentException.class, () -> DeprecationRestHandler.requireValidHeader(value)); + } + + public void testInvalidHeaderValueNull() { + assertFalse(DeprecationRestHandler.validHeaderValue(null)); + + expectThrows(IllegalArgumentException.class, () -> DeprecationRestHandler.requireValidHeader(null)); + } + + public void testInvalidHeaderValueEmpty() { + String blank = randomFrom("", "\t", " "); + + assertFalse(DeprecationRestHandler.validHeaderValue(blank)); + + expectThrows(IllegalArgumentException.class, () -> DeprecationRestHandler.requireValidHeader(blank)); + } + + /** + * {@code ASCIIHeaderGenerator} only uses characters expected to be valid in headers (simplified US-ASCII). + */ + private static class ASCIIHeaderGenerator extends CodepointSetGenerator { + /** + * Create a character array for characters [{@code from}, {@code to}]. + * + * @param from Starting code point (inclusive). + * @param to Ending code point (inclusive). + * @return Never {@code null}. + */ + static char[] asciiFromTo(int from, int to) { + char[] chars = new char[to - from + 1]; + + for (int i = from; i <= to; ++i) { + chars[i - from] = (char)i; + } + + return chars; + } + + /** + * Create a generator for characters [32, 126]. 
+ */ + public ASCIIHeaderGenerator() { + super(asciiFromTo(32, 126)); + } + } + +} diff --git a/core/src/test/java/org/elasticsearch/rest/RestControllerTests.java b/core/src/test/java/org/elasticsearch/rest/RestControllerTests.java index 9cade7aa513..dca0a16f0f0 100644 --- a/core/src/test/java/org/elasticsearch/rest/RestControllerTests.java +++ b/core/src/test/java/org/elasticsearch/rest/RestControllerTests.java @@ -19,86 +19,57 @@ package org.elasticsearch.rest; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.FakeRestRequest; -import java.util.Arrays; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.TimeUnit; - -import static org.hamcrest.CoreMatchers.equalTo; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.eq; +import static org.mockito.Mockito.doCallRealMethod; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; public class RestControllerTests extends ESTestCase { - public void testRegisterRelevantHeaders() throws InterruptedException { - - final RestController restController = new RestController(Settings.EMPTY); - - int iterations = randomIntBetween(1, 5); - - Set headers = new HashSet<>(); - ExecutorService executorService = Executors.newFixedThreadPool(iterations); - for (int i = 0; i < iterations; i++) { - int headersCount = randomInt(10); - final Set newHeaders = new HashSet<>(); - for (int j = 0; j < headersCount; j++) { - String usefulHeader = randomRealisticUnicodeOfLengthBetween(1, 30); - newHeaders.add(usefulHeader); - } - headers.addAll(newHeaders); - - executorService.submit((Runnable) () -> restController.registerRelevantHeaders(newHeaders.toArray(new String[newHeaders.size()]))); - } - - executorService.shutdown(); - assertThat(executorService.awaitTermination(1, TimeUnit.SECONDS), equalTo(true)); - String[] relevantHeaders = restController.relevantHeaders().toArray(new String[restController.relevantHeaders().size()]); - assertThat(relevantHeaders.length, equalTo(headers.size())); - - Arrays.sort(relevantHeaders); - String[] headersArray = new String[headers.size()]; - headersArray = headers.toArray(headersArray); - Arrays.sort(headersArray); - assertThat(relevantHeaders, equalTo(headersArray)); - } - public void testApplyRelevantHeaders() throws Exception { final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); - final RestController restController = new RestController(Settings.EMPTY) { + Set headers = new HashSet<>(Arrays.asList("header.1", "header.2")); + final RestController restController = new RestController(Settings.EMPTY, headers) { @Override boolean checkRequestParameters(RestRequest request, RestChannel channel) { return true; } @Override - void executeHandler(RestRequest request, RestChannel channel) throws Exception { + void executeHandler(RestRequest request, RestChannel channel, NodeClient client) throws Exception { assertEquals("true", threadContext.getHeader("header.1")); assertEquals("true", 
threadContext.getHeader("header.2")); assertNull(threadContext.getHeader("header.3")); - } }; threadContext.putHeader("header.3", "true"); - restController.registerRelevantHeaders("header.1", "header.2"); Map<String, String> restHeaders = new HashMap<>(); restHeaders.put("header.1", "true"); restHeaders.put("header.2", "true"); restHeaders.put("header.3", "false"); - restController.dispatchRequest(new FakeRestRequest.Builder().withHeaders(restHeaders).build(), null, threadContext); + restController.dispatchRequest(new FakeRestRequest.Builder().withHeaders(restHeaders).build(), null, null, threadContext); assertNull(threadContext.getHeader("header.1")); assertNull(threadContext.getHeader("header.2")); assertEquals("true", threadContext.getHeader("header.3")); } public void testCanTripCircuitBreaker() throws Exception { - RestController controller = new RestController(Settings.EMPTY); + RestController controller = new RestController(Settings.EMPTY, Collections.emptySet()); // trip circuit breaker by default controller.registerHandler(RestRequest.Method.GET, "/trip", new FakeRestHandler(true)); controller.registerHandler(RestRequest.Method.GET, "/do-not-trip", new FakeRestHandler(false)); @@ -109,6 +80,48 @@ public class RestControllerTests extends ESTestCase { assertFalse(controller.canTripCircuitBreaker(new FakeRestRequest.Builder().withPath("/do-not-trip").build())); } + public void testRegisterAsDeprecatedHandler() { + RestController controller = mock(RestController.class); + + RestRequest.Method method = randomFrom(RestRequest.Method.values()); + String path = "/_" + randomAsciiOfLengthBetween(1, 6); + RestHandler handler = mock(RestHandler.class); + String deprecationMessage = randomAsciiOfLengthBetween(1, 10); + DeprecationLogger logger = mock(DeprecationLogger.class); + + // don't want to test everything -- just that it actually wraps the handler + doCallRealMethod().when(controller).registerAsDeprecatedHandler(method, path, handler, deprecationMessage, logger); + + controller.registerAsDeprecatedHandler(method, path, handler, deprecationMessage, logger); + + verify(controller).registerHandler(eq(method), eq(path), any(DeprecationRestHandler.class)); + } + + public void testRegisterWithDeprecatedHandler() { + final RestController controller = mock(RestController.class); + + final RestRequest.Method method = randomFrom(RestRequest.Method.values()); + final String path = "/_" + randomAsciiOfLengthBetween(1, 6); + final RestHandler handler = mock(RestHandler.class); + final RestRequest.Method deprecatedMethod = randomFrom(RestRequest.Method.values()); + final String deprecatedPath = "/_" + randomAsciiOfLengthBetween(1, 6); + final DeprecationLogger logger = mock(DeprecationLogger.class); + + final String deprecationMessage = "[" + deprecatedMethod.name() + " " + deprecatedPath + "] is deprecated! Use [" + + method.name() + " " + path + "] instead."; + + // don't want to test everything -- just that it actually wraps the handlers + doCallRealMethod().when(controller).registerWithDeprecatedHandler(method, path, handler, deprecatedMethod, deprecatedPath, logger); + + controller.registerWithDeprecatedHandler(method, path, handler, deprecatedMethod, deprecatedPath, logger); + + verify(controller).registerHandler(method, path, handler); + verify(controller).registerAsDeprecatedHandler(deprecatedMethod, deprecatedPath, handler, deprecationMessage, logger); + } + + /** + * Useful for testing with deprecation handler.
+ */ private static class FakeRestHandler implements RestHandler { private final boolean canTripCircuitBreaker; @@ -117,7 +130,7 @@ public class RestControllerTests extends ESTestCase { } @Override - public void handleRequest(RestRequest request, RestChannel channel) throws Exception { + public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { //no op } diff --git a/core/src/test/java/org/elasticsearch/rest/RestFilterChainTests.java b/core/src/test/java/org/elasticsearch/rest/RestFilterChainTests.java index 51f36d1e25f..5013d637436 100644 --- a/core/src/test/java/org/elasticsearch/rest/RestFilterChainTests.java +++ b/core/src/test/java/org/elasticsearch/rest/RestFilterChainTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.rest; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -39,7 +40,7 @@ import static org.hamcrest.CoreMatchers.equalTo; public class RestFilterChainTests extends ESTestCase { public void testRestFilters() throws Exception { - RestController restController = new RestController(Settings.EMPTY); + RestController restController = new RestController(Settings.EMPTY, Collections.emptySet()); int numFilters = randomInt(10); Set orders = new HashSet<>(numFilters); @@ -71,16 +72,13 @@ public class RestFilterChainTests extends ESTestCase { } } - restController.registerHandler(RestRequest.Method.GET, "/", new RestHandler() { - @Override - public void handleRequest(RestRequest request, RestChannel channel) throws Exception { - channel.sendResponse(new TestResponse()); - } + restController.registerHandler(RestRequest.Method.GET, "/", (request, channel, client) -> { + channel.sendResponse(new TestResponse()); }); FakeRestRequest fakeRestRequest = new FakeRestRequest(); FakeRestChannel fakeRestChannel = new FakeRestChannel(fakeRestRequest, randomBoolean(), 1); - restController.dispatchRequest(fakeRestRequest, fakeRestChannel, new ThreadContext(Settings.EMPTY)); + restController.dispatchRequest(fakeRestRequest, fakeRestChannel, null, new ThreadContext(Settings.EMPTY)); assertThat(fakeRestChannel.await(), equalTo(true)); @@ -117,28 +115,25 @@ public class RestFilterChainTests extends ESTestCase { final int additionalContinueCount = randomInt(10); - TestFilter testFilter = new TestFilter(randomInt(), new Callback() { - @Override - public void execute(final RestRequest request, final RestChannel channel, final RestFilterChain filterChain) throws Exception { - for (int i = 0; i <= additionalContinueCount; i++) { - filterChain.continueProcessing(request, channel); - } + TestFilter testFilter = new TestFilter(randomInt(), (request, channel, client, filterChain) -> { + for (int i = 0; i <= additionalContinueCount; i++) { + filterChain.continueProcessing(request, channel, null); } }); - RestController restController = new RestController(Settings.EMPTY); + RestController restController = new RestController(Settings.EMPTY, Collections.emptySet()); restController.registerFilter(testFilter); restController.registerHandler(RestRequest.Method.GET, "/", new RestHandler() { @Override - public void handleRequest(RestRequest request, RestChannel channel) throws Exception { + public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { channel.sendResponse(new TestResponse()); } }); FakeRestRequest fakeRestRequest = new FakeRestRequest(); 
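As a reading aid for the deprecation plumbing exercised in testRegisterAsDeprecatedHandler and testRegisterWithDeprecatedHandler above: a minimal sketch, not part of this patch, of how a REST action might keep an old route alive behind the new wrapper. The action class and both paths are hypothetical, and the sketch assumes the 5.x-era signatures visible elsewhere in this diff, where registerWithDeprecatedHandler synthesizes the "[GET /_old] is deprecated! Use [GET /_new] instead." message asserted above, and registerAsDeprecatedHandler wraps the old route in a DeprecationRestHandler that logs before forwarding.

import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.BytesRestResponse;
import org.elasticsearch.rest.RestChannel;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.RestStatus;

// Hypothetical action; illustration only, not code from this patch.
public class RestMyAction extends BaseRestHandler {

    private static final DeprecationLogger DEPRECATION_LOGGER =
            new DeprecationLogger(Loggers.getLogger(RestMyAction.class));

    public RestMyAction(Settings settings, RestController controller) {
        super(settings);
        // Register the canonical route, plus the old spelling, which stays routable but is
        // wrapped in a DeprecationRestHandler that logs a warning before forwarding:
        controller.registerWithDeprecatedHandler(RestRequest.Method.GET, "/_new", this,
                                                 RestRequest.Method.GET, "/_old", DEPRECATION_LOGGER);
    }

    @Override
    public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception {
        channel.sendResponse(new BytesRestResponse(RestStatus.OK, "ok")); // placeholder body
    }
}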
FakeRestChannel fakeRestChannel = new FakeRestChannel(fakeRestRequest, randomBoolean(), additionalContinueCount + 1); - restController.dispatchRequest(fakeRestRequest, fakeRestChannel, new ThreadContext(Settings.EMPTY)); + restController.dispatchRequest(fakeRestRequest, fakeRestChannel, null, new ThreadContext(Settings.EMPTY)); fakeRestChannel.await(); assertThat(testFilter.runs.get(), equalTo(1)); @@ -147,23 +142,23 @@ public class RestFilterChainTests extends ESTestCase { assertThat(fakeRestChannel.errors().get(), equalTo(additionalContinueCount)); } - private static enum Operation implements Callback { + private enum Operation implements Callback { CONTINUE_PROCESSING { @Override - public void execute(RestRequest request, RestChannel channel, RestFilterChain filterChain) throws Exception { - filterChain.continueProcessing(request, channel); + public void execute(RestRequest request, RestChannel channel, NodeClient client, RestFilterChain filterChain) throws Exception { + filterChain.continueProcessing(request, channel, client); } }, CHANNEL_RESPONSE { @Override - public void execute(RestRequest request, RestChannel channel, RestFilterChain filterChain) throws Exception { + public void execute(RestRequest request, RestChannel channel, NodeClient client, RestFilterChain filterChain) throws Exception { channel.sendResponse(new TestResponse()); } } } - private static interface Callback { - void execute(RestRequest request, RestChannel channel, RestFilterChain filterChain) throws Exception; + private interface Callback { + void execute(RestRequest request, RestChannel channel, NodeClient client, RestFilterChain filterChain) throws Exception; } private final AtomicInteger counter = new AtomicInteger(); @@ -180,10 +175,10 @@ public class RestFilterChainTests extends ESTestCase { } @Override - public void process(RestRequest request, RestChannel channel, RestFilterChain filterChain) throws Exception { + public void process(RestRequest request, RestChannel channel, NodeClient client, RestFilterChain filterChain) throws Exception { this.runs.incrementAndGet(); this.executionToken = counter.incrementAndGet(); - this.callback.execute(request, channel, filterChain); + this.callback.execute(request, channel, client, filterChain); } @Override diff --git a/core/src/test/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeActionTests.java b/core/src/test/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeActionTests.java index 8541b5785c8..4b2a6703252 100644 --- a/core/src/test/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeActionTests.java +++ b/core/src/test/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeActionTests.java @@ -25,9 +25,11 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.test.ESTestCase; +import org.hamcrest.core.IsNull; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.startsWith; public class RestAnalyzeActionTests extends ESTestCase { @@ -46,8 +48,45 @@ public class RestAnalyzeActionTests extends ESTestCase { assertThat(analyzeRequest.text().length, equalTo(1)); assertThat(analyzeRequest.text(), equalTo(new String[]{"THIS IS A TEST"})); - assertThat(analyzeRequest.tokenizer(), equalTo("keyword")); - assertThat(analyzeRequest.tokenFilters(), 
equalTo(new String[]{"lowercase"})); + assertThat(analyzeRequest.tokenizer().name, equalTo("keyword")); + assertThat(analyzeRequest.tokenFilters().size(), equalTo(1)); + for (AnalyzeRequest.NameOrDefinition filter : analyzeRequest.tokenFilters()) { + assertThat(filter.name, equalTo("lowercase")); + } + } + + public void testParseXContentForAnalyzeRequestWithCustomFilters() throws Exception { + BytesReference content = XContentFactory.jsonBuilder() + .startObject() + .field("text", "THIS IS A TEST") + .field("tokenizer", "keyword") + .startArray("filter") + .value("lowercase") + .startObject() + .field("type", "stop") + .array("stopwords", "foo", "buzz") + .endObject() + .endArray() + .startArray("char_filter") + .startObject() + .field("type", "mapping") + .array("mappings", "ph => f", "qu => q") + .endObject() + .endArray() + .endObject().bytes(); + + AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); + + RestAnalyzeAction.buildFromContent(content, analyzeRequest, new ParseFieldMatcher(Settings.EMPTY)); + + assertThat(analyzeRequest.text().length, equalTo(1)); + assertThat(analyzeRequest.text(), equalTo(new String[]{"THIS IS A TEST"})); + assertThat(analyzeRequest.tokenizer().name, equalTo("keyword")); + assertThat(analyzeRequest.tokenFilters().size(), equalTo(2)); + assertThat(analyzeRequest.tokenFilters().get(0).name, equalTo("lowercase")); + assertThat(analyzeRequest.tokenFilters().get(1).definition, notNullValue()); + assertThat(analyzeRequest.charFilters().size(), equalTo(1)); + assertThat(analyzeRequest.charFilters().get(0).definition, notNullValue()); } public void testParseXContentForAnalyzeRequestWithInvalidJsonThrowsException() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/rest/action/cat/RestIndicesActionTests.java b/core/src/test/java/org/elasticsearch/rest/action/cat/RestIndicesActionTests.java new file mode 100644 index 00000000000..2d6d65d6344 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/rest/action/cat/RestIndicesActionTests.java @@ -0,0 +1,163 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.rest.action.cat; + +import org.elasticsearch.Version; +import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; +import org.elasticsearch.action.admin.indices.stats.CommonStats; +import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; +import org.elasticsearch.action.admin.indices.stats.IndicesStatsTests; +import org.elasticsearch.action.admin.indices.stats.ShardStats; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.UnassignedInfo; +import org.elasticsearch.common.Table; +import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.cache.query.QueryCacheStats; +import org.elasticsearch.index.cache.request.RequestCacheStats; +import org.elasticsearch.index.engine.SegmentsStats; +import org.elasticsearch.index.fielddata.FieldDataStats; +import org.elasticsearch.index.flush.FlushStats; +import org.elasticsearch.index.get.GetStats; +import org.elasticsearch.index.merge.MergeStats; +import org.elasticsearch.index.refresh.RefreshStats; +import org.elasticsearch.index.search.stats.SearchStats; +import org.elasticsearch.index.shard.DocsStats; +import org.elasticsearch.index.shard.IndexingStats; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.shard.ShardPath; +import org.elasticsearch.index.store.StoreStats; +import org.elasticsearch.index.warmer.WarmerStats; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.search.suggest.completion.CompletionStats; +import org.elasticsearch.test.ESTestCase; + +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import static java.util.Collections.emptyList; +import static org.hamcrest.Matchers.equalTo; + +/** + * Tests for {@link RestIndicesAction} + */ +public class RestIndicesActionTests extends ESTestCase { + + public void testBuildTable() { + final Settings settings = Settings.EMPTY; + final RestController restController = new RestController(settings, Collections.emptySet()); + final RestIndicesAction action = new RestIndicesAction(settings, restController, new IndexNameExpressionResolver(settings)); + + // build a (semi-)random table + final int numIndices = randomIntBetween(0, 5); + Index[] indices = new Index[numIndices]; + for (int i = 0; i < numIndices; i++) { + indices[i] = new Index(randomAsciiOfLength(5), UUIDs.randomBase64UUID()); + } + + final MetaData.Builder metaDataBuilder = MetaData.builder(); + for (final Index index : indices) { + metaDataBuilder.put(IndexMetaData.builder(index.getName()) + .settings(Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_INDEX_UUID, index.getUUID())) + .creationDate(System.currentTimeMillis()) + .numberOfShards(1) + .numberOfReplicas(1) + .state(IndexMetaData.State.OPEN)); + } + final MetaData metaData = metaDataBuilder.build(); + + final ClusterState clusterState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)) + .metaData(metaData) + .build(); + final String[] 
indicesStr = new String[indices.length]; + for (int i = 0; i < indices.length; i++) { + indicesStr[i] = indices[i].getName(); + } + final ClusterHealthResponse clusterHealth = new ClusterHealthResponse( + clusterState.getClusterName().value(), indicesStr, clusterState, 0, 0, 0, TimeValue.timeValueMillis(1000L) + ); + + final Table table = action.buildTable(null, indices, clusterHealth, randomIndicesStatsResponse(indices), metaData); + + // now, verify the table is correct + int count = 0; + List<Table.Cell> headers = table.getHeaders(); + assertThat(headers.get(count++).value, equalTo("health")); + assertThat(headers.get(count++).value, equalTo("status")); + assertThat(headers.get(count++).value, equalTo("index")); + assertThat(headers.get(count++).value, equalTo("uuid")); + + List<List<Table.Cell>> rows = table.getRows(); + assertThat(rows.size(), equalTo(indices.length)); + // TODO: more to verify (e.g. randomize cluster health, num primaries, num replicas, etc) + for (int i = 0; i < rows.size(); i++) { + count = 0; + final List<Table.Cell> row = rows.get(i); + assertThat(row.get(count++).value, equalTo("red*")); // all are red because cluster state doesn't have routing entries + assertThat(row.get(count++).value, equalTo("open")); // all are OPEN for now + assertThat(row.get(count++).value, equalTo(indices[i].getName())); + assertThat(row.get(count++).value, equalTo(indices[i].getUUID())); + } + } + + private IndicesStatsResponse randomIndicesStatsResponse(final Index[] indices) { + List<ShardStats> shardStats = new ArrayList<>(); + for (final Index index : indices) { + for (int i = 0; i < 2; i++) { + ShardId shardId = new ShardId(index, i); + Path path = createTempDir().resolve("indices").resolve(index.getUUID()).resolve(String.valueOf(i)); + ShardRouting shardRouting = ShardRouting.newUnassigned(shardId, null, i == 0, + new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null)); + shardRouting = shardRouting.initialize("node-0", null, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE); + shardRouting = shardRouting.moveToStarted(); + CommonStats stats = new CommonStats(); + stats.fieldData = new FieldDataStats(); + stats.queryCache = new QueryCacheStats(); + stats.docs = new DocsStats(); + stats.store = new StoreStats(); + stats.indexing = new IndexingStats(); + stats.search = new SearchStats(); + stats.segments = new SegmentsStats(); + stats.merge = new MergeStats(); + stats.refresh = new RefreshStats(); + stats.completion = new CompletionStats(); + stats.requestCache = new RequestCacheStats(); + stats.get = new GetStats(); + stats.flush = new FlushStats(); + stats.warmer = new WarmerStats(); + shardStats.add(new ShardStats(shardRouting, new ShardPath(false, path, path, shardId), stats, null)); + } + } + return IndicesStatsTests.newIndicesStatsResponse( + shardStats.toArray(new ShardStats[shardStats.size()]), shardStats.size(), shardStats.size(), 0, emptyList() + ); + } +} diff --git a/core/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java b/core/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java index b603ded8697..c0ca4eb2274 100644 --- a/core/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java +++ b/core/src/test/java/org/elasticsearch/rest/action/cat/RestRecoveryActionTests.java @@ -37,6 +37,7 @@ import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Locale; @@ -50,8 +51,8 @@ public class
RestRecoveryActionTests extends ESTestCase { public void testRestRecoveryAction() { final Settings settings = Settings.EMPTY; - final RestController restController = new RestController(settings); - final RestRecoveryAction action = new RestRecoveryAction(settings, restController, restController, null); + final RestController restController = new RestController(settings, Collections.emptySet()); + final RestRecoveryAction action = new RestRecoveryAction(settings, restController, restController); final int totalShards = randomIntBetween(1, 32); final int successfulShards = Math.max(0, totalShards - randomIntBetween(1, 2)); final int failedShards = totalShards - successfulShards; diff --git a/core/src/test/java/org/elasticsearch/rest/action/main/RestMainActionIT.java b/core/src/test/java/org/elasticsearch/rest/action/main/RestMainActionIT.java deleted file mode 100644 index 72f8733292c..00000000000 --- a/core/src/test/java/org/elasticsearch/rest/action/main/RestMainActionIT.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.rest.action.main; - -import org.apache.http.util.EntityUtils; -import org.elasticsearch.client.Response; -import org.elasticsearch.common.network.NetworkModule; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.test.ESIntegTestCase; - -import java.io.IOException; -import java.util.Collections; - -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; - -public class RestMainActionIT extends ESIntegTestCase { - @Override - protected Settings nodeSettings(int nodeOrdinal) { - return Settings.builder() - .put(super.nodeSettings(nodeOrdinal)) - .put(NetworkModule.HTTP_ENABLED.getKey(), true) - .build(); - } - - public void testHeadRequest() throws IOException { - try (Response response = getRestClient().performRequest("HEAD", "/", Collections.emptyMap(), null)) { - assertThat(response.getStatusLine().getStatusCode(), equalTo(200)); - assertNull(response.getEntity()); - } - } - - public void testGetRequest() throws IOException { - try (Response response = getRestClient().performRequest("GET", "/", Collections.emptyMap(), null)) { - assertThat(response.getStatusLine().getStatusCode(), equalTo(200)); - assertNotNull(response.getEntity()); - assertThat(EntityUtils.toString(response.getEntity()), containsString("cluster_name")); - } - } -} diff --git a/core/src/test/java/org/elasticsearch/rest/action/main/RestMainActionTests.java b/core/src/test/java/org/elasticsearch/rest/action/main/RestMainActionTests.java index ebb7dd255aa..ffefa074df7 100644 --- a/core/src/test/java/org/elasticsearch/rest/action/main/RestMainActionTests.java +++ b/core/src/test/java/org/elasticsearch/rest/action/main/RestMainActionTests.java @@ -95,6 +95,6 @@ public class RestMainActionTests extends ESTestCase { } mainResponse.toXContent(responseBuilder, ToXContent.EMPTY_PARAMS); BytesReference xcontentBytes = responseBuilder.bytes(); - assertTrue(BytesReference.Helper.bytesEqual(xcontentBytes, response.content())); + assertEquals(xcontentBytes, response.content()); } } diff --git a/core/src/test/java/org/elasticsearch/rest/action/support/RestTableTests.java b/core/src/test/java/org/elasticsearch/rest/action/support/RestTableTests.java index a7e17785d48..3dfae8cc4f8 100644 --- a/core/src/test/java/org/elasticsearch/rest/action/support/RestTableTests.java +++ b/core/src/test/java/org/elasticsearch/rest/action/support/RestTableTests.java @@ -169,7 +169,7 @@ public class RestTableTests extends ESTestCase { private void assertResponse(Map headers, String mediaType, String body) throws Exception { RestResponse response = assertResponseContentType(headers, mediaType); - assertThat(response.content().toUtf8(), equalTo(body)); + assertThat(response.content().utf8ToString(), equalTo(body)); } private List getHeaderNames(List headers) { diff --git a/core/src/test/java/org/elasticsearch/script/FileScriptTests.java b/core/src/test/java/org/elasticsearch/script/FileScriptTests.java index 9fbd4a81f19..f932317085b 100644 --- a/core/src/test/java/org/elasticsearch/script/FileScriptTests.java +++ b/core/src/test/java/org/elasticsearch/script/FileScriptTests.java @@ -58,7 +58,7 @@ public class FileScriptTests extends ESTestCase { CompiledScript compiledScript = scriptService.compile(script, ScriptContext.Standard.SEARCH, Collections.emptyMap()); assertNotNull(compiledScript); MockCompiledScript executable = (MockCompiledScript) compiledScript.compiled(); - assertEquals("script1.mockscript", executable.name); + 
assertEquals("script1.mockscript", executable.getName()); } public void testAllOpsDisabled() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/script/ScriptParameterParserTests.java b/core/src/test/java/org/elasticsearch/script/ScriptParameterParserTests.java deleted file mode 100644 index 94dc0085f7d..00000000000 --- a/core/src/test/java/org/elasticsearch/script/ScriptParameterParserTests.java +++ /dev/null @@ -1,1280 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.script; - - -import org.elasticsearch.common.ParseFieldMatcher; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.xcontent.ToXContent.MapParams; -import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentParser.Token; -import org.elasticsearch.script.Script.ScriptParseException; -import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue; -import org.elasticsearch.script.ScriptService.ScriptType; -import org.elasticsearch.test.ESTestCase; - -import java.io.IOException; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.Map; -import java.util.Set; - -import static java.util.Collections.singleton; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.nullValue; - -public class ScriptParameterParserTests extends ESTestCase { - public void testTokenDefaultInline() throws IOException { - XContentParser parser = XContentHelper.createParser(new BytesArray("{ \"script\" : \"scriptValue\" }")); - Token token = parser.nextToken(); - while (token != Token.VALUE_STRING) { - token = parser.nextToken(); - } - ScriptParameterParser paramParser = new ScriptParameterParser(); - assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true)); - assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.INLINE); - assertThat(paramParser.lang(), nullValue()); - paramParser = new ScriptParameterParser(null); - assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true)); - assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.INLINE); - assertThat(paramParser.lang(), nullValue()); - paramParser = new ScriptParameterParser(new HashSet()); - assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true)); - assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.INLINE); - 
assertThat(paramParser.lang(), nullValue()); - } - - public void testTokenDefaultFile() throws IOException { - XContentParser parser = XContentHelper.createParser(new BytesArray("{ \"script_file\" : \"scriptValue\" }")); - Token token = parser.nextToken(); - while (token != Token.VALUE_STRING) { - token = parser.nextToken(); - } - ScriptParameterParser paramParser = new ScriptParameterParser(); - assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true)); - assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.FILE); - assertThat(paramParser.lang(), nullValue()); - } - - public void testTokenDefaultIndexed() throws IOException { - XContentParser parser = XContentHelper.createParser(new BytesArray("{ \"script_id\" : \"scriptValue\" }")); - Token token = parser.nextToken(); - while (token != Token.VALUE_STRING) { - token = parser.nextToken(); - } - ScriptParameterParser paramParser = new ScriptParameterParser(); - assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true)); - assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.STORED); - assertThat(paramParser.lang(), nullValue()); - } - - public void testTokenDefaultNotFound() throws IOException { - XContentParser parser = XContentHelper.createParser(new BytesArray("{ \"foo\" : \"bar\" }")); - Token token = parser.nextToken(); - while (token != Token.VALUE_STRING) { - token = parser.nextToken(); - } - ScriptParameterParser paramParser = new ScriptParameterParser(); - assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(false)); - assertThat(paramParser.getDefaultScriptParameterValue(), nullValue()); - assertThat(paramParser.getScriptParameterValue("script"), nullValue()); - assertThat(paramParser.lang(), nullValue()); - } - - public void testTokenSingleParameter() throws IOException { - XContentParser parser = XContentHelper.createParser(new BytesArray("{ \"foo\" : \"scriptValue\" }")); - Token token = parser.nextToken(); - while (token != Token.VALUE_STRING) { - token = parser.nextToken(); - } - Set parameters = Collections.singleton("foo"); - ScriptParameterParser paramParser = new ScriptParameterParser(parameters); - assertThat(paramParser.getScriptParameterValue("foo"), nullValue()); - assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true)); - assertParameterValue(paramParser, "foo", "scriptValue", ScriptType.INLINE); - assertThat(paramParser.lang(), nullValue()); - } - - public void testTokenSingleParameterFile() throws IOException { - XContentParser parser = XContentHelper.createParser(new BytesArray("{ \"foo_file\" : \"scriptValue\" }")); - Token token = parser.nextToken(); - while (token != Token.VALUE_STRING) { - token = parser.nextToken(); - } - Set parameters = Collections.singleton("foo"); - ScriptParameterParser paramParser = new ScriptParameterParser(parameters); - assertThat(paramParser.getScriptParameterValue("foo"), nullValue()); - assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true)); - assertParameterValue(paramParser, "foo", "scriptValue", ScriptType.FILE); - assertThat(paramParser.lang(), nullValue()); - } - - public void testTokenSingleParameterIndexed() throws IOException { - XContentParser parser = XContentHelper.createParser(new BytesArray("{ \"foo_id\" : 
\"scriptValue\" }")); - Token token = parser.nextToken(); - while (token != Token.VALUE_STRING) { - token = parser.nextToken(); - } - Set parameters = Collections.singleton("foo"); - ScriptParameterParser paramParser = new ScriptParameterParser(parameters); - assertThat(paramParser.getScriptParameterValue("foo"), nullValue()); - assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true)); - assertParameterValue(paramParser, "foo", "scriptValue", ScriptType.STORED); - assertThat(paramParser.lang(), nullValue()); - } - - public void testTokenSingleParameterDelcaredTwiceInlineFile() throws IOException { - XContentParser parser = XContentHelper.createParser(new BytesArray("{ \"foo\" : \"scriptValue\", \"foo_file\" : \"scriptValue\" }")); - Token token = parser.nextToken(); - while (token != Token.VALUE_STRING) { - token = parser.nextToken(); - } - Set parameters = Collections.singleton("foo"); - ScriptParameterParser paramParser = new ScriptParameterParser(parameters); - assertThat(paramParser.getScriptParameterValue("foo"), nullValue()); - assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true)); - assertParameterValue(paramParser, "foo", "scriptValue", ScriptType.INLINE); - assertThat(paramParser.lang(), nullValue()); - token = parser.nextToken(); - while (token != Token.VALUE_STRING) { - token = parser.nextToken(); - } - try { - paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT); - fail("Expected ScriptParseException"); - } catch (ScriptParseException e) { - assertThat(e.getMessage(), is("Only one of [foo, foo_file, foo_id] is allowed.")); - } - } - - public void testTokenSingleParameterDelcaredTwiceInlineIndexed() throws IOException { - XContentParser parser = XContentHelper.createParser(new BytesArray("{ \"foo\" : \"scriptValue\", \"foo_id\" : \"scriptValue\" }")); - Token token = parser.nextToken(); - while (token != Token.VALUE_STRING) { - token = parser.nextToken(); - } - Set parameters = Collections.singleton("foo"); - ScriptParameterParser paramParser = new ScriptParameterParser(parameters); - assertThat(paramParser.getScriptParameterValue("foo"), nullValue()); - assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true)); - assertParameterValue(paramParser, "foo", "scriptValue", ScriptType.INLINE); - assertThat(paramParser.lang(), nullValue()); - token = parser.nextToken(); - while (token != Token.VALUE_STRING) { - token = parser.nextToken(); - } - try { - paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT); - fail("Expected ScriptParseException"); - } catch (ScriptParseException e) { - assertThat(e.getMessage(), is("Only one of [foo, foo_file, foo_id] is allowed.")); - } - } - - public void testTokenSingleParameterDelcaredTwiceFileInline() throws IOException { - XContentParser parser = XContentHelper.createParser(new BytesArray("{ \"foo_file\" : \"scriptValue\", \"foo\" : \"scriptValue\" }")); - Token token = parser.nextToken(); - while (token != Token.VALUE_STRING) { - token = parser.nextToken(); - } - Set parameters = Collections.singleton("foo"); - ScriptParameterParser paramParser = new ScriptParameterParser(parameters); - assertThat(paramParser.getScriptParameterValue("foo"), nullValue()); - assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, 
ParseFieldMatcher.STRICT), equalTo(true)); - assertParameterValue(paramParser, "foo", "scriptValue", ScriptType.FILE); - assertThat(paramParser.lang(), nullValue()); - token = parser.nextToken(); - while (token != Token.VALUE_STRING) { - token = parser.nextToken(); - } - try { - paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT); - fail("Expected ScriptParseException"); - } catch (ScriptParseException e) { - assertThat(e.getMessage(), is("Only one of [foo, foo_file, foo_id] is allowed.")); - } - } - - public void testTokenSingleParameterDelcaredTwiceFileIndexed() throws IOException { - XContentParser parser = XContentHelper.createParser(new BytesArray("{ \"foo_file\" : \"scriptValue\", \"foo_id\" : \"scriptValue\" }")); - Token token = parser.nextToken(); - while (token != Token.VALUE_STRING) { - token = parser.nextToken(); - } - Set parameters = Collections.singleton("foo"); - ScriptParameterParser paramParser = new ScriptParameterParser(parameters); - assertThat(paramParser.getScriptParameterValue("foo"), nullValue()); - assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true)); - assertParameterValue(paramParser, "foo", "scriptValue", ScriptType.FILE); - assertThat(paramParser.lang(), nullValue()); - token = parser.nextToken(); - while (token != Token.VALUE_STRING) { - token = parser.nextToken(); - } - try { - paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT); - fail("Expected ScriptParseException"); - } catch (ScriptParseException e) { - assertThat(e.getMessage(), is("Only one of [foo, foo_file, foo_id] is allowed.")); - } - } - - public void testTokenSingleParameterDelcaredTwiceIndexedInline() throws IOException { - XContentParser parser = XContentHelper.createParser(new BytesArray("{ \"foo_id\" : \"scriptValue\", \"foo\" : \"scriptValue\" }")); - Token token = parser.nextToken(); - while (token != Token.VALUE_STRING) { - token = parser.nextToken(); - } - Set parameters = Collections.singleton("foo"); - ScriptParameterParser paramParser = new ScriptParameterParser(parameters); - assertThat(paramParser.getScriptParameterValue("foo"), nullValue()); - assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true)); - assertParameterValue(paramParser, "foo", "scriptValue", ScriptType.STORED); - assertThat(paramParser.lang(), nullValue()); - token = parser.nextToken(); - while (token != Token.VALUE_STRING) { - token = parser.nextToken(); - } - try { - paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT); - fail("Expected ScriptParseException"); - } catch (ScriptParseException e) { - assertThat(e.getMessage(), is("Only one of [foo, foo_file, foo_id] is allowed.")); - } - } - - public void testTokenSingleParameterDelcaredTwiceIndexedFile() throws IOException { - XContentParser parser = XContentHelper.createParser(new BytesArray("{ \"foo_id\" : \"scriptValue\", \"foo_file\" : \"scriptValue\" }")); - Token token = parser.nextToken(); - while (token != Token.VALUE_STRING) { - token = parser.nextToken(); - } - Set parameters = Collections.singleton("foo"); - ScriptParameterParser paramParser = new ScriptParameterParser(parameters); - assertThat(paramParser.getScriptParameterValue("foo"), nullValue()); - assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true)); - 
assertParameterValue(paramParser, "foo", "scriptValue", ScriptType.STORED); - assertThat(paramParser.lang(), nullValue()); - token = parser.nextToken(); - while (token != Token.VALUE_STRING) { - token = parser.nextToken(); - } - try { - paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT); - fail("Expected ScriptParseException"); - } catch (ScriptParseException e) { - assertThat(e.getMessage(), is("Only one of [foo, foo_file, foo_id] is allowed.")); - } - } - - public void testTokenMultipleParameters() throws IOException { - XContentParser parser = XContentHelper.createParser(new BytesArray("{ \"foo\" : \"fooScriptValue\", \"bar_file\" : \"barScriptValue\", \"baz_id\" : \"bazScriptValue\" }")); - Set parameters = new HashSet<>(); - parameters.add("foo"); - parameters.add("bar"); - parameters.add("baz"); - ScriptParameterParser paramParser = new ScriptParameterParser(parameters); - assertThat(paramParser.getScriptParameterValue("foo"), nullValue()); - assertThat(paramParser.getScriptParameterValue("bar"), nullValue()); - assertThat(paramParser.getScriptParameterValue("baz"), nullValue()); - assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue()); - assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue()); - assertThat(paramParser.lang(), nullValue()); - Token token = parser.nextToken(); - while (token != Token.VALUE_STRING) { - token = parser.nextToken(); - } - assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true)); - assertParameterValue(paramParser, "foo", "fooScriptValue", ScriptType.INLINE); - assertThat(paramParser.getScriptParameterValue("bar"), nullValue()); - assertThat(paramParser.getScriptParameterValue("baz"), nullValue()); - assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue()); - assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue()); - assertThat(paramParser.lang(), nullValue()); - token = parser.nextToken(); - while (token != Token.VALUE_STRING) { - token = parser.nextToken(); - } - assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true)); - assertParameterValue(paramParser, "foo", "fooScriptValue", ScriptType.INLINE); - assertParameterValue(paramParser, "bar", "barScriptValue", ScriptType.FILE); - assertThat(paramParser.getScriptParameterValue("baz"), nullValue()); - assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue()); - assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue()); - assertThat(paramParser.lang(), nullValue()); - token = parser.nextToken(); - while (token != Token.VALUE_STRING) { - token = parser.nextToken(); - } - assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true)); - assertParameterValue(paramParser, "foo", "fooScriptValue", ScriptType.INLINE); - assertParameterValue(paramParser, "bar", "barScriptValue", ScriptType.FILE); - assertParameterValue(paramParser, "baz", "bazScriptValue", ScriptType.STORED); - assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue()); - assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue()); - assertThat(paramParser.lang(), nullValue()); - } - - public void testTokenMultipleParametersWithLang() throws IOException { - XContentParser parser = XContentHelper.createParser(new BytesArray("{ \"foo\" : \"fooScriptValue\", \"bar_file\" : \"barScriptValue\", 
\"lang\" : \"myLang\", \"baz_id\" : \"bazScriptValue\" }")); - Set parameters = new HashSet<>(); - parameters.add("foo"); - parameters.add("bar"); - parameters.add("baz"); - ScriptParameterParser paramParser = new ScriptParameterParser(parameters); - assertThat(paramParser.getScriptParameterValue("foo"), nullValue()); - assertThat(paramParser.getScriptParameterValue("bar"), nullValue()); - assertThat(paramParser.getScriptParameterValue("baz"), nullValue()); - assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue()); - assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue()); - assertThat(paramParser.lang(), nullValue()); - Token token = parser.nextToken(); - while (token != Token.VALUE_STRING) { - token = parser.nextToken(); - } - assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true)); - assertParameterValue(paramParser, "foo", "fooScriptValue", ScriptType.INLINE); - assertThat(paramParser.getScriptParameterValue("bar"), nullValue()); - assertThat(paramParser.getScriptParameterValue("baz"), nullValue()); - assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue()); - assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue()); - assertThat(paramParser.lang(), nullValue()); - token = parser.nextToken(); - while (token != Token.VALUE_STRING) { - token = parser.nextToken(); - } - assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true)); - assertParameterValue(paramParser, "foo", "fooScriptValue", ScriptType.INLINE); - assertParameterValue(paramParser, "bar", "barScriptValue", ScriptType.FILE); - assertThat(paramParser.getScriptParameterValue("baz"), nullValue()); - assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue()); - assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue()); - assertThat(paramParser.lang(), nullValue()); - token = parser.nextToken(); - while (token != Token.VALUE_STRING) { - token = parser.nextToken(); - } - assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true)); - assertParameterValue(paramParser, "foo", "fooScriptValue", ScriptType.INLINE); - assertParameterValue(paramParser, "bar", "barScriptValue", ScriptType.FILE); - assertThat(paramParser.getScriptParameterValue("baz"), nullValue()); - assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue()); - assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue()); - assertThat(paramParser.lang(), equalTo("myLang")); - token = parser.nextToken(); - while (token != Token.VALUE_STRING) { - token = parser.nextToken(); - } - assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true)); - assertParameterValue(paramParser, "foo", "fooScriptValue", ScriptType.INLINE); - assertParameterValue(paramParser, "bar", "barScriptValue", ScriptType.FILE); - assertParameterValue(paramParser, "baz", "bazScriptValue", ScriptType.STORED); - assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue()); - assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue()); - assertThat(paramParser.lang(), equalTo("myLang")); - } - - public void testTokenMultipleParametersNotFound() throws IOException { - XContentParser parser = XContentHelper.createParser(new BytesArray("{ \"other\" : \"scriptValue\" }")); - Set parameters = new HashSet<>(); - 
parameters.add("foo"); - parameters.add("bar"); - parameters.add("baz"); - ScriptParameterParser paramParser = new ScriptParameterParser(parameters); - assertThat(paramParser.getScriptParameterValue("foo"), nullValue()); - assertThat(paramParser.getScriptParameterValue("bar"), nullValue()); - assertThat(paramParser.getScriptParameterValue("baz"), nullValue()); - assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue()); - assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue()); - assertThat(paramParser.lang(), nullValue()); - Token token = parser.nextToken(); - while (token != Token.VALUE_STRING) { - token = parser.nextToken(); - } - assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(false)); - assertThat(paramParser.getScriptParameterValue("other"), nullValue()); - assertThat(paramParser.getScriptParameterValue("foo"), nullValue()); - assertThat(paramParser.getScriptParameterValue("bar"), nullValue()); - assertThat(paramParser.getScriptParameterValue("baz"), nullValue()); - assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue()); - assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue()); - assertThat(paramParser.lang(), nullValue()); - } - - public void testTokenMultipleParametersSomeNotFound() throws IOException { - XContentParser parser = XContentHelper.createParser(new BytesArray("{ \"foo\" : \"fooScriptValue\", \"other_file\" : \"barScriptValue\", \"baz_id\" : \"bazScriptValue\" }")); - Set parameters = new HashSet<>(); - parameters.add("foo"); - parameters.add("bar"); - parameters.add("baz"); - ScriptParameterParser paramParser = new ScriptParameterParser(parameters); - assertThat(paramParser.getScriptParameterValue("foo"), nullValue()); - assertThat(paramParser.getScriptParameterValue("bar"), nullValue()); - assertThat(paramParser.getScriptParameterValue("baz"), nullValue()); - assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue()); - assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue()); - assertThat(paramParser.getScriptParameterValue("other"), nullValue()); - assertThat(paramParser.getScriptParameterValue("other_file"), nullValue()); - assertThat(paramParser.lang(), nullValue()); - Token token = parser.nextToken(); - while (token != Token.VALUE_STRING) { - token = parser.nextToken(); - } - assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true)); - assertParameterValue(paramParser, "foo", "fooScriptValue", ScriptType.INLINE); - assertThat(paramParser.getScriptParameterValue("bar"), nullValue()); - assertThat(paramParser.getScriptParameterValue("baz"), nullValue()); - assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue()); - assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue()); - assertThat(paramParser.getScriptParameterValue("other"), nullValue()); - assertThat(paramParser.getScriptParameterValue("other_file"), nullValue()); - assertThat(paramParser.lang(), nullValue()); - token = parser.nextToken(); - while (token != Token.VALUE_STRING) { - token = parser.nextToken(); - } - assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(false)); - assertParameterValue(paramParser, "foo", "fooScriptValue", ScriptType.INLINE); - assertThat(paramParser.getScriptParameterValue("bar"), nullValue()); - assertThat(paramParser.getScriptParameterValue("baz"), nullValue()); - 
assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue()); - assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue()); - assertThat(paramParser.getScriptParameterValue("other"), nullValue()); - assertThat(paramParser.getScriptParameterValue("other_file"), nullValue()); - assertThat(paramParser.lang(), nullValue()); - token = parser.nextToken(); - while (token != Token.VALUE_STRING) { - token = parser.nextToken(); - } - assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(true)); - assertParameterValue(paramParser, "foo", "fooScriptValue", ScriptType.INLINE); - assertThat(paramParser.getScriptParameterValue("bar"), nullValue()); - assertParameterValue(paramParser, "baz", "bazScriptValue", ScriptType.STORED); - assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue()); - assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue()); - assertThat(paramParser.getScriptParameterValue("other"), nullValue()); - assertThat(paramParser.getScriptParameterValue("other_file"), nullValue()); - assertThat(paramParser.lang(), nullValue()); - } - - public void testTokenMultipleParametersWrongType() throws IOException { - XContentParser parser = XContentHelper.createParser(new BytesArray("{ \"foo\" : \"fooScriptValue\", \"bar_file\" : \"barScriptValue\", \"baz_id\" : \"bazScriptValue\" }")); - Set parameters = new HashSet<>(); - parameters.add("foo"); - parameters.add("bar"); - parameters.add("baz"); - ScriptParameterParser paramParser = new ScriptParameterParser(parameters); - assertThat(paramParser.getScriptParameterValue("foo"), nullValue()); - assertThat(paramParser.getScriptParameterValue("bar"), nullValue()); - assertThat(paramParser.getScriptParameterValue("baz"), nullValue()); - assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue()); - assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue()); - assertThat(paramParser.lang(), nullValue()); - assertThat(paramParser.token(parser.currentName(), parser.currentToken(), parser, ParseFieldMatcher.STRICT), equalTo(false)); - assertThat(paramParser.getScriptParameterValue("foo"), nullValue()); - assertThat(paramParser.getScriptParameterValue("bar"), nullValue()); - assertThat(paramParser.getScriptParameterValue("baz"), nullValue()); - assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue()); - assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue()); - assertThat(paramParser.lang(), nullValue()); - } - - public void testReservedParameters() { - try { - new ScriptParameterParser(singleton("lang")); - fail("Expected IllegalArgumentException"); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("lang is reserved")); - } - } - - public void testConfigDefaultInline() throws IOException { - Map config = new HashMap<>(); - config.put("script", "scriptValue"); - ScriptParameterParser paramParser = new ScriptParameterParser(); - paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT); - assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.INLINE); - assertThat(paramParser.lang(), nullValue()); - assertThat(config.isEmpty(), equalTo(true)); - config = new HashMap<>(); - config.put("script", "scriptValue"); - paramParser = new ScriptParameterParser(null); - paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT); - assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.INLINE); - assertThat(paramParser.lang(), 
nullValue()); - assertThat(config.isEmpty(), equalTo(true)); - config = new HashMap<>(); - config.put("script", "scriptValue"); - paramParser = new ScriptParameterParser(new HashSet()); - paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT); - assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.INLINE); - assertThat(paramParser.lang(), nullValue()); - assertThat(config.isEmpty(), equalTo(true)); - } - - public void testConfigDefaultFile() throws IOException { - Map config = new HashMap<>(); - config.put("script_file", "scriptValue"); - ScriptParameterParser paramParser = new ScriptParameterParser(); - paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT); - assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.FILE); - assertThat(paramParser.lang(), nullValue()); - assertThat(config.isEmpty(), equalTo(true)); - } - - public void testConfigDefaultIndexed() throws IOException { - Map config = new HashMap<>(); - config.put("script_id", "scriptValue"); - ScriptParameterParser paramParser = new ScriptParameterParser(); - paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT); - assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.STORED); - assertThat(paramParser.lang(), nullValue()); - assertThat(config.isEmpty(), equalTo(true)); - } - - public void testConfigDefaultIndexedNoRemove() throws IOException { - Map config = new HashMap<>(); - config.put("script_id", "scriptValue"); - ScriptParameterParser paramParser = new ScriptParameterParser(); - paramParser.parseConfig(config, false, ParseFieldMatcher.STRICT); - assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.STORED); - assertThat(paramParser.lang(), nullValue()); - assertThat(config.size(), equalTo(1)); - assertThat((String) config.get("script_id"), equalTo("scriptValue")); - } - - public void testConfigDefaultNotFound() throws IOException { - Map config = new HashMap<>(); - config.put("foo", "bar"); - ScriptParameterParser paramParser = new ScriptParameterParser(); - paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT); - assertThat(paramParser.getDefaultScriptParameterValue(), nullValue()); - assertThat(paramParser.getScriptParameterValue("script"), nullValue()); - assertThat(paramParser.lang(), nullValue()); - assertThat(config.size(), equalTo(1)); - assertThat((String) config.get("foo"), equalTo("bar")); - } - - public void testConfigSingleParameter() throws IOException { - Map config = new HashMap<>(); - config.put("foo", "scriptValue"); - Set parameters = Collections.singleton("foo"); - ScriptParameterParser paramParser = new ScriptParameterParser(parameters); - assertThat(paramParser.getScriptParameterValue("foo"), nullValue()); - paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT); - assertParameterValue(paramParser, "foo", "scriptValue", ScriptType.INLINE); - assertThat(paramParser.lang(), nullValue()); - assertThat(config.isEmpty(), equalTo(true)); - } - - public void testConfigSingleParameterFile() throws IOException { - Map config = new HashMap<>(); - config.put("foo_file", "scriptValue"); - Set parameters = Collections.singleton("foo"); - ScriptParameterParser paramParser = new ScriptParameterParser(parameters); - assertThat(paramParser.getScriptParameterValue("foo"), nullValue()); - paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT); - assertParameterValue(paramParser, "foo", "scriptValue", ScriptType.FILE); - assertThat(paramParser.lang(), nullValue()); - assertThat(config.isEmpty(), equalTo(true)); - } 
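A note for readers skimming the wholesale deletion of ScriptParameterParserTests: every variant above and below exercises the same suffix convention, where a bare parameter name ("foo") carries an inline script, "foo_file" a file script, and "foo_id" a stored script, and declaring more than one spelling for the same parameter fails with "Only one of [foo, foo_file, foo_id] is allowed." A standalone sketch of that rule follows, with hypothetical names; it is an illustration, not the removed class.

import java.util.Map;

// Illustration only: the suffix convention the deleted tests covered.
enum SketchScriptType { INLINE, FILE, STORED }

final class ScriptParamRule {

    /** Resolve which of name / name_file / name_id is present; more than one is an error. */
    static SketchScriptType resolve(Map<String, Object> config, String name) {
        boolean inline = config.containsKey(name);
        boolean file = config.containsKey(name + "_file");
        boolean stored = config.containsKey(name + "_id");
        if ((inline ? 1 : 0) + (file ? 1 : 0) + (stored ? 1 : 0) > 1) {
            throw new IllegalArgumentException(
                    "Only one of [" + name + ", " + name + "_file, " + name + "_id] is allowed.");
        }
        if (inline) {
            return SketchScriptType.INLINE;
        } else if (file) {
            return SketchScriptType.FILE;
        } else if (stored) {
            return SketchScriptType.STORED;
        }
        return null; // parameter absent
    }
}

For example, resolve(Collections.singletonMap("foo_id", "myStoredScript"), "foo") yields STORED, while a config containing both "foo" and "foo_file" throws, matching the message asserted throughout the deleted tests.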
-
-    public void testConfigSingleParameterIndexed() throws IOException {
-        Map<String, Object> config = new HashMap<>();
-        config.put("foo_id", "scriptValue");
-        Set<String> parameters = Collections.singleton("foo");
-        ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
-        assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
-        paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
-        assertParameterValue(paramParser, "foo", "scriptValue", ScriptType.STORED);
-        assertThat(paramParser.lang(), nullValue());
-        assertThat(config.isEmpty(), equalTo(true));
-    }
-
-    public void testConfigSingleParameterDelcaredTwiceInlineFile() throws IOException {
-        Map<String, Object> config = new LinkedHashMap<>();
-        config.put("foo", "scriptValue");
-        config.put("foo_file", "scriptValue");
-        Set<String> parameters = Collections.singleton("foo");
-        ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
-        assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
-        try {
-            paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
-            fail("Expected ScriptParseException");
-        } catch (ScriptParseException e) {
-            assertThat(e.getMessage(), is("Only one of [foo, foo_file, foo_id] is allowed."));
-        }
-    }
-
-    public void testConfigSingleParameterDelcaredTwiceInlineIndexed() throws IOException {
-        Map<String, Object> config = new LinkedHashMap<>();
-        config.put("foo", "scriptValue");
-        config.put("foo_id", "scriptValue");
-        Set<String> parameters = Collections.singleton("foo");
-        ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
-        assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
-        try {
-            paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
-            fail("Expected ScriptParseException");
-        } catch (ScriptParseException e) {
-            assertThat(e.getMessage(), is("Only one of [foo, foo_file, foo_id] is allowed."));
-        }
-    }
-
-    public void testConfigSingleParameterDelcaredTwiceFileInline() throws IOException {
-        Map<String, Object> config = new LinkedHashMap<>();
-        config.put("foo_file", "scriptValue");
-        config.put("foo", "scriptValue");
-        Set<String> parameters = Collections.singleton("foo");
-        ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
-        assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
-        try {
-            paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
-            fail("Expected ScriptParseException");
-        } catch (ScriptParseException e) {
-            assertThat(e.getMessage(), is("Only one of [foo, foo_file, foo_id] is allowed."));
-        }
-    }
-
-    public void testConfigSingleParameterDelcaredTwiceFileIndexed() throws IOException {
-        Map<String, Object> config = new LinkedHashMap<>();
-        config.put("foo_file", "scriptValue");
-        config.put("foo_id", "scriptValue");
-        Set<String> parameters = Collections.singleton("foo");
-        ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
-        assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
-        try {
-            paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
-            fail("Expected ScriptParseException");
-        } catch (ScriptParseException e) {
-            assertThat(e.getMessage(), is("Only one of [foo, foo_file, foo_id] is allowed."));
-        }
-    }
-
-    public void testConfigSingleParameterDelcaredTwiceIndexedInline() throws IOException {
-        Map<String, Object> config = new LinkedHashMap<>();
-        config.put("foo_id", "scriptValue");
-        config.put("foo", "scriptValue");
-        Set<String> parameters = Collections.singleton("foo");
-        ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
-        assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
-        try {
-            paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
-            fail("Expected ScriptParseException");
-        } catch (ScriptParseException e) {
-            assertThat(e.getMessage(), is("Only one of [foo, foo_file, foo_id] is allowed."));
-        }
-    }
-
-    public void testConfigSingleParameterDelcaredTwiceIndexedFile() throws IOException {
-        Map<String, Object> config = new LinkedHashMap<>();
-        config.put("foo_id", "scriptValue");
-        config.put("foo_file", "scriptValue");
-        Set<String> parameters = Collections.singleton("foo");
-        ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
-        assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
-        try {
-            paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
-            fail("Expected ScriptParseException");
-        } catch (ScriptParseException e) {
-            assertThat(e.getMessage(), is("Only one of [foo, foo_file, foo_id] is allowed."));
-        }
-    }
-
-    public void testConfigMultipleParameters() throws IOException {
-        Map<String, Object> config = new HashMap<>();
-        config.put("foo", "fooScriptValue");
-        config.put("bar_file", "barScriptValue");
-        config.put("baz_id", "bazScriptValue");
-        Set<String> parameters = new HashSet<>();
-        parameters.add("foo");
-        parameters.add("bar");
-        parameters.add("baz");
-        ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
-        assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("bar"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("baz"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue());
-        assertThat(paramParser.lang(), nullValue());
-        paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
-        assertParameterValue(paramParser, "foo", "fooScriptValue", ScriptType.INLINE);
-        assertParameterValue(paramParser, "bar", "barScriptValue", ScriptType.FILE);
-        assertParameterValue(paramParser, "baz", "bazScriptValue", ScriptType.STORED);
-        assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue());
-        assertThat(paramParser.lang(), nullValue());
-        assertThat(config.isEmpty(), equalTo(true));
-    }
-
-    public void testConfigMultipleParametersWithLang() throws IOException {
-        Map<String, Object> config = new HashMap<>();
-        config.put("foo", "fooScriptValue");
-        config.put("bar_file", "barScriptValue");
-        config.put("lang", "myLang");
-        config.put("baz_id", "bazScriptValue");
-        Set<String> parameters = new HashSet<>();
-        parameters.add("foo");
-        parameters.add("bar");
-        parameters.add("baz");
-        ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
-        assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("bar"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("baz"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue());
-        assertThat(paramParser.lang(), nullValue());
-        paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
-        assertParameterValue(paramParser, "foo", "fooScriptValue", ScriptType.INLINE);
-        assertParameterValue(paramParser, "bar", "barScriptValue", ScriptType.FILE);
-        assertParameterValue(paramParser, "baz", "bazScriptValue", ScriptType.STORED);
-        assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue());
-        assertThat(paramParser.lang(), equalTo("myLang"));
-        assertThat(config.isEmpty(), equalTo(true));
-    }
-
-    public void testConfigMultipleParametersWithLangNoRemove() throws IOException {
-        Map<String, Object> config = new HashMap<>();
-        config.put("foo", "fooScriptValue");
-        config.put("bar_file", "barScriptValue");
-        config.put("lang", "myLang");
-        config.put("baz_id", "bazScriptValue");
-        Set<String> parameters = new HashSet<>();
-        parameters.add("foo");
-        parameters.add("bar");
-        parameters.add("baz");
-        ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
-        assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("bar"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("baz"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue());
-        assertThat(paramParser.lang(), nullValue());
-        paramParser.parseConfig(config, false, ParseFieldMatcher.STRICT);
-        assertParameterValue(paramParser, "foo", "fooScriptValue", ScriptType.INLINE);
-        assertParameterValue(paramParser, "bar", "barScriptValue", ScriptType.FILE);
-        assertParameterValue(paramParser, "baz", "bazScriptValue", ScriptType.STORED);
-        assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue());
-        assertThat(paramParser.lang(), equalTo("myLang"));
-        assertThat(config.size(), equalTo(4));
-        assertThat((String) config.get("foo"), equalTo("fooScriptValue"));
-        assertThat((String) config.get("bar_file"), equalTo("barScriptValue"));
-        assertThat((String) config.get("baz_id"), equalTo("bazScriptValue"));
-        assertThat((String) config.get("lang"), equalTo("myLang"));
-    }
-
-    public void testConfigMultipleParametersNotFound() throws IOException {
-        Map<String, Object> config = new HashMap<>();
-        config.put("other", "scriptValue");
-        Set<String> parameters = new HashSet<>();
-        parameters.add("foo");
-        parameters.add("bar");
-        parameters.add("baz");
-        ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
-        assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("bar"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("baz"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue());
-        assertThat(paramParser.lang(), nullValue());
-        paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
-        assertThat(paramParser.getScriptParameterValue("other"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("bar"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("baz"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue());
-        assertThat(paramParser.lang(), nullValue());
-        assertThat(config.size(), equalTo(1));
-        assertThat((String) config.get("other"), equalTo("scriptValue"));
-    }
-
-    public void testConfigMultipleParametersSomeNotFound() throws IOException {
-        Map<String, Object> config = new HashMap<>();
-        config.put("foo", "fooScriptValue");
-        config.put("other_file", "barScriptValue");
"barScriptValue"); - config.put("baz_id", "bazScriptValue"); - Set parameters = new HashSet<>(); - parameters.add("foo"); - parameters.add("bar"); - parameters.add("baz"); - ScriptParameterParser paramParser = new ScriptParameterParser(parameters); - assertThat(paramParser.getScriptParameterValue("foo"), nullValue()); - assertThat(paramParser.getScriptParameterValue("bar"), nullValue()); - assertThat(paramParser.getScriptParameterValue("baz"), nullValue()); - assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue()); - assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue()); - assertThat(paramParser.getScriptParameterValue("other"), nullValue()); - assertThat(paramParser.getScriptParameterValue("other_file"), nullValue()); - assertThat(paramParser.lang(), nullValue()); - paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT); - assertParameterValue(paramParser, "foo", "fooScriptValue", ScriptType.INLINE); - assertThat(paramParser.getScriptParameterValue("bar"), nullValue()); - assertParameterValue(paramParser, "baz", "bazScriptValue", ScriptType.STORED); - assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue()); - assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue()); - assertThat(paramParser.getScriptParameterValue("other"), nullValue()); - assertThat(paramParser.getScriptParameterValue("other_file"), nullValue()); - assertThat(paramParser.lang(), nullValue()); - assertThat(config.size(), equalTo(1)); - assertThat((String) config.get("other_file"), equalTo("barScriptValue")); - } - - public void testConfigMultipleParametersInlineWrongType() throws IOException { - Map config = new HashMap<>(); - config.put("foo", 1L); - config.put("bar_file", "barScriptValue"); - config.put("baz_id", "bazScriptValue"); - config.put("lang", "myLang"); - Set parameters = new HashSet<>(); - parameters.add("foo"); - parameters.add("bar"); - parameters.add("baz"); - ScriptParameterParser paramParser = new ScriptParameterParser(parameters); - assertThat(paramParser.getScriptParameterValue("foo"), nullValue()); - assertThat(paramParser.getScriptParameterValue("bar"), nullValue()); - assertThat(paramParser.getScriptParameterValue("baz"), nullValue()); - assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue()); - assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue()); - assertThat(paramParser.lang(), nullValue()); - try { - paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT); - fail("Expected ScriptParseException"); - } catch (ScriptParseException e) { - assertThat(e.getMessage(), is("Value must be of type String: [foo]")); - } - } - - public void testConfigMultipleParametersFileWrongType() throws IOException { - Map config = new HashMap<>(); - config.put("foo", "fooScriptValue"); - config.put("bar_file", 1L); - config.put("baz_id", "bazScriptValue"); - config.put("lang", "myLang"); - Set parameters = new HashSet<>(); - parameters.add("foo"); - parameters.add("bar"); - parameters.add("baz"); - ScriptParameterParser paramParser = new ScriptParameterParser(parameters); - assertThat(paramParser.getScriptParameterValue("foo"), nullValue()); - assertThat(paramParser.getScriptParameterValue("bar"), nullValue()); - assertThat(paramParser.getScriptParameterValue("baz"), nullValue()); - assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue()); - assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue()); - assertThat(paramParser.lang(), nullValue()); - try { - 
-            paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
-            fail("Expected ScriptParseException");
-        } catch (ScriptParseException e) {
-            assertThat(e.getMessage(), is("Value must be of type String: [bar_file]"));
-        }
-
-    }
-
-    public void testConfigMultipleParametersIndexedWrongType() throws IOException {
-        Map<String, Object> config = new HashMap<>();
-        config.put("foo", "fooScriptValue");
-        config.put("bar_file", "barScriptValue");
-        config.put("baz_id", 1L);
-        config.put("lang", "myLang");
-        Set<String> parameters = new HashSet<>();
-        parameters.add("foo");
-        parameters.add("bar");
-        parameters.add("baz");
-        ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
-        assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("bar"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("baz"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue());
-        assertThat(paramParser.lang(), nullValue());
-        try {
-            paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
-            fail("Expected ScriptParseException");
-        } catch (ScriptParseException e) {
-            assertThat(e.getMessage(), is("Value must be of type String: [baz_id]"));
-        }
-    }
-
-    public void testConfigMultipleParametersLangWrongType() throws IOException {
-        Map<String, Object> config = new HashMap<>();
-        config.put("foo", "fooScriptValue");
-        config.put("bar_file", "barScriptValue");
-        config.put("baz_id", "bazScriptValue");
-        config.put("lang", 1L);
-        Set<String> parameters = new HashSet<>();
-        parameters.add("foo");
-        parameters.add("bar");
-        parameters.add("baz");
-        ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
-        assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("bar"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("baz"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue());
-        assertThat(paramParser.lang(), nullValue());
-        try {
-            paramParser.parseConfig(config, true, ParseFieldMatcher.STRICT);
-            fail("Expected ScriptParseException");
-        } catch (ScriptParseException e) {
-            assertThat(e.getMessage(), is("Value must be of type String: [lang]"));
-        }
-    }
-
-    public void testParamsDefaultInline() throws IOException {
-        Map<String, String> config = new HashMap<>();
-        config.put("script", "scriptValue");
-        MapParams params = new MapParams(config);
-        ScriptParameterParser paramParser = new ScriptParameterParser();
-        paramParser.parseParams(params);
-        assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.INLINE);
-        assertThat(paramParser.lang(), nullValue());
-
-        paramParser = new ScriptParameterParser(null);
-        paramParser.parseParams(params);
-        assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.INLINE);
-        assertThat(paramParser.lang(), nullValue());
-
-        paramParser = new ScriptParameterParser(new HashSet<String>());
-        paramParser.parseParams(params);
-        assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.INLINE);
-        assertThat(paramParser.lang(), nullValue());
-    }
-
-    public void testParamsDefaultFile() throws IOException {
-        Map<String, String> config = new HashMap<>();
-        config.put("script_file", "scriptValue");
-        MapParams params = new MapParams(config);
-        ScriptParameterParser paramParser = new ScriptParameterParser();
-        paramParser.parseParams(params);
-        assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.FILE);
-        assertThat(paramParser.lang(), nullValue());
-    }
-
-    public void testParamsDefaultIndexed() throws IOException {
-        Map<String, String> config = new HashMap<>();
-        config.put("script_id", "scriptValue");
-        MapParams params = new MapParams(config);
-        ScriptParameterParser paramParser = new ScriptParameterParser();
-        paramParser.parseParams(params);
-        assertDefaultParameterValue(paramParser, "scriptValue", ScriptType.STORED);
-        assertThat(paramParser.lang(), nullValue());
-    }
-
-    public void testParamsDefaultNotFound() throws IOException {
-        Map<String, String> config = new HashMap<>();
-        config.put("foo", "bar");
-        MapParams params = new MapParams(config);
-        ScriptParameterParser paramParser = new ScriptParameterParser();
-        paramParser.parseParams(params);
-        assertThat(paramParser.getDefaultScriptParameterValue(), nullValue());
-        assertThat(paramParser.getScriptParameterValue("script"), nullValue());
-        assertThat(paramParser.lang(), nullValue());
-    }
-
-    public void testParamsSingleParameter() throws IOException {
-        Map<String, String> config = new HashMap<>();
-        config.put("foo", "scriptValue");
-        Set<String> parameters = Collections.singleton("foo");
-        ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
-        assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
-        MapParams params = new MapParams(config);
-        paramParser.parseParams(params);
-        assertParameterValue(paramParser, "foo", "scriptValue", ScriptType.INLINE);
-        assertThat(paramParser.lang(), nullValue());
-    }
-
-    public void testParamsSingleParameterFile() throws IOException {
-        Map<String, String> config = new HashMap<>();
-        config.put("foo_file", "scriptValue");
-        Set<String> parameters = Collections.singleton("foo");
-        ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
-        assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
-        MapParams params = new MapParams(config);
-        paramParser.parseParams(params);
-        assertParameterValue(paramParser, "foo", "scriptValue", ScriptType.FILE);
-        assertThat(paramParser.lang(), nullValue());
-    }
-
-    public void testParamsSingleParameterIndexed() throws IOException {
-        Map<String, String> config = new HashMap<>();
-        config.put("foo_id", "scriptValue");
-        Set<String> parameters = Collections.singleton("foo");
-        ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
-        assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
-        MapParams params = new MapParams(config);
-        paramParser.parseParams(params);
-        assertParameterValue(paramParser, "foo", "scriptValue", ScriptType.STORED);
-        assertThat(paramParser.lang(), nullValue());
-    }
-
-    public void testParamsSingleParameterDelcaredTwiceInlineFile() throws IOException {
-        Map<String, String> config = new LinkedHashMap<>();
-        config.put("foo", "scriptValue");
-        config.put("foo_file", "scriptValue");
-        Set<String> parameters = Collections.singleton("foo");
-        ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
-        assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
-        MapParams params = new MapParams(config);
-        try {
-            paramParser.parseParams(params);
-            fail("Expected ScriptParseException");
-        } catch (ScriptParseException e) {
-            assertThat(e.getMessage(), is("Only one of [foo, foo_file, foo_id] is allowed."));
-        }
-    }
-
-    public void testParamsSingleParameterDelcaredTwiceInlineIndexed() throws IOException {
-        Map<String, String> config = new LinkedHashMap<>();
-        config.put("foo", "scriptValue");
-        config.put("foo_id", "scriptValue");
-        Set<String> parameters = Collections.singleton("foo");
-        ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
-        assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
-        MapParams params = new MapParams(config);
-        try {
-            paramParser.parseParams(params);
-            fail("Expected ScriptParseException");
-        } catch (ScriptParseException e) {
-            assertThat(e.getMessage(), is("Only one of [foo, foo_file, foo_id] is allowed."));
-        }
-    }
-
-    public void testParamsSingleParameterDelcaredTwiceFileInline() throws IOException {
-        Map<String, String> config = new LinkedHashMap<>();
-        config.put("foo_file", "scriptValue");
-        config.put("foo", "scriptValue");
-        Set<String> parameters = Collections.singleton("foo");
-        ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
-        assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
-        MapParams params = new MapParams(config);
-        try {
-            paramParser.parseParams(params);
-            fail("Expected ScriptParseException");
-        } catch (ScriptParseException e) {
-            assertThat(e.getMessage(), is("Only one of [foo, foo_file, foo_id] is allowed."));
-        }
-    }
-
-    public void testParamsSingleParameterDelcaredTwiceFileIndexed() throws IOException {
-        Map<String, String> config = new LinkedHashMap<>();
-        config.put("foo_file", "scriptValue");
-        config.put("foo_id", "scriptValue");
-        Set<String> parameters = Collections.singleton("foo");
-        ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
-        assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
-        MapParams params = new MapParams(config);
-        try {
-            paramParser.parseParams(params);
-            fail("Expected ScriptParseException");
-        } catch (ScriptParseException e) {
-            assertThat(e.getMessage(), is("Only one of [foo, foo_file, foo_id] is allowed."));
-        }
-    }
-
-    public void testParamsSingleParameterDelcaredTwiceIndexedInline() throws IOException {
-        Map<String, String> config = new LinkedHashMap<>();
-        config.put("foo_id", "scriptValue");
-        config.put("foo", "scriptValue");
-        Set<String> parameters = Collections.singleton("foo");
-        ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
-        assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
-        MapParams params = new MapParams(config);
-        try {
-            paramParser.parseParams(params);
-            fail("Expected ScriptParseException");
-        } catch (ScriptParseException e) {
-            assertThat(e.getMessage(), is("Only one of [foo, foo_file, foo_id] is allowed."));
-        }
-    }
-
-    public void testParamsSingleParameterDelcaredTwiceIndexedFile() throws IOException {
-        Map<String, String> config = new LinkedHashMap<>();
-        config.put("foo_id", "scriptValue");
-        config.put("foo_file", "scriptValue");
-        Set<String> parameters = Collections.singleton("foo");
-        ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
-        assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
-        MapParams params = new MapParams(config);
-        try {
-            paramParser.parseParams(params);
-            fail("Expected ScriptParseException");
-        } catch (ScriptParseException e) {
-            assertThat(e.getMessage(), is("Only one of [foo, foo_file, foo_id] is allowed."));
-        }
-    }
-
-    public void testParamsMultipleParameters() throws IOException {
-        Map<String, String> config = new HashMap<>();
-        config.put("foo", "fooScriptValue");
-        config.put("bar_file", "barScriptValue");
-        config.put("baz_id", "bazScriptValue");
-        Set<String> parameters = new HashSet<>();
-        parameters.add("foo");
-        parameters.add("bar");
-        parameters.add("baz");
-        ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
-        assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("bar"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("baz"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue());
-        assertThat(paramParser.lang(), nullValue());
-        MapParams params = new MapParams(config);
-        paramParser.parseParams(params);
-        assertParameterValue(paramParser, "foo", "fooScriptValue", ScriptType.INLINE);
-        assertParameterValue(paramParser, "bar", "barScriptValue", ScriptType.FILE);
-        assertParameterValue(paramParser, "baz", "bazScriptValue", ScriptType.STORED);
-        assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue());
-        assertThat(paramParser.lang(), nullValue());
-    }
-
-    public void testParamsMultipleParametersWithLang() throws IOException {
-        Map<String, String> config = new HashMap<>();
-        config.put("foo", "fooScriptValue");
-        config.put("bar_file", "barScriptValue");
-        config.put("lang", "myLang");
-        config.put("baz_id", "bazScriptValue");
-        Set<String> parameters = new HashSet<>();
-        parameters.add("foo");
-        parameters.add("bar");
-        parameters.add("baz");
-        ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
-        assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("bar"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("baz"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue());
-        assertThat(paramParser.lang(), nullValue());
-        MapParams params = new MapParams(config);
-        paramParser.parseParams(params);
-        assertParameterValue(paramParser, "foo", "fooScriptValue", ScriptType.INLINE);
-        assertParameterValue(paramParser, "bar", "barScriptValue", ScriptType.FILE);
-        assertParameterValue(paramParser, "baz", "bazScriptValue", ScriptType.STORED);
-        assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue());
-        assertThat(paramParser.lang(), equalTo("myLang"));
-    }
-
-    public void testParamsMultipleParametersWithLangNoRemove() throws IOException {
-        Map<String, String> config = new HashMap<>();
-        config.put("foo", "fooScriptValue");
-        config.put("bar_file", "barScriptValue");
-        config.put("lang", "myLang");
-        config.put("baz_id", "bazScriptValue");
-        Set<String> parameters = new HashSet<>();
-        parameters.add("foo");
-        parameters.add("bar");
-        parameters.add("baz");
-        ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
-        assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("bar"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("baz"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue());
-        assertThat(paramParser.lang(), nullValue());
-        MapParams params = new MapParams(config);
-        paramParser.parseParams(params);
-        assertParameterValue(paramParser, "foo", "fooScriptValue", ScriptType.INLINE);
-        assertParameterValue(paramParser, "bar", "barScriptValue", ScriptType.FILE);
-        assertParameterValue(paramParser, "baz", "bazScriptValue", ScriptType.STORED);
-        assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue());
-        assertThat(paramParser.lang(), equalTo("myLang"));
-    }
-
-    public void testParamsMultipleParametersNotFound() throws IOException {
-        Map<String, String> config = new HashMap<>();
-        config.put("other", "scriptValue");
-        Set<String> parameters = new HashSet<>();
-        parameters.add("foo");
-        parameters.add("bar");
-        parameters.add("baz");
-        ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
-        assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("bar"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("baz"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue());
-        assertThat(paramParser.lang(), nullValue());
-        MapParams params = new MapParams(config);
-        paramParser.parseParams(params);
-        assertThat(paramParser.getScriptParameterValue("other"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("bar"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("baz"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue());
-        assertThat(paramParser.lang(), nullValue());
-    }
-
-    public void testParamsMultipleParametersSomeNotFound() throws IOException {
-        Map<String, String> config = new HashMap<>();
-        config.put("foo", "fooScriptValue");
-        config.put("other_file", "barScriptValue");
-        config.put("baz_id", "bazScriptValue");
-        Set<String> parameters = new HashSet<>();
-        parameters.add("foo");
-        parameters.add("bar");
-        parameters.add("baz");
-        ScriptParameterParser paramParser = new ScriptParameterParser(parameters);
-        assertThat(paramParser.getScriptParameterValue("foo"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("bar"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("baz"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("other"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("other_file"), nullValue());
-        assertThat(paramParser.lang(), nullValue());
-        MapParams params = new MapParams(config);
-        paramParser.parseParams(params);
-        assertParameterValue(paramParser, "foo", "fooScriptValue", ScriptType.INLINE);
-        assertThat(paramParser.getScriptParameterValue("bar"), nullValue());
-        assertParameterValue(paramParser, "baz", "bazScriptValue", ScriptType.STORED);
-        assertThat(paramParser.getScriptParameterValue("bar_file"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("baz_id"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("other"), nullValue());
-        assertThat(paramParser.getScriptParameterValue("other_file"), nullValue());
-        assertThat(paramParser.lang(), nullValue());
-    }
-
-    private void assertDefaultParameterValue(ScriptParameterParser paramParser, String expectedScript, ScriptType expectedScriptType) throws IOException {
-        ScriptParameterValue defaultValue = paramParser.getDefaultScriptParameterValue();
-        ScriptParameterValue defaultValueByName = paramParser.getScriptParameterValue("script");
-        assertThat(defaultValue.scriptType(), equalTo(expectedScriptType));
-        assertThat(defaultValue.script(), equalTo(expectedScript));
-        assertThat(defaultValueByName.scriptType(), equalTo(expectedScriptType));
-        assertThat(defaultValueByName.script(), equalTo(expectedScript));
-    }
-
-    private void assertParameterValue(ScriptParameterParser paramParser, String parameterName, String expectedScript, ScriptType expectedScriptType) throws IOException {
-        ScriptParameterValue value = paramParser.getScriptParameterValue(parameterName);
-        assertThat(value.scriptType(), equalTo(expectedScriptType));
-        assertThat(value.script(), equalTo(expectedScript));
-    }
-}
diff --git a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java
index 43224488480..d9bbb85d419 100644
--- a/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/script/ScriptServiceTests.java
@@ -44,7 +44,6 @@ import java.nio.file.Path;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
-import java.util.List;
 import java.util.Map;
 
 import static org.hamcrest.CoreMatchers.containsString;
@@ -444,12 +443,12 @@ public class ScriptServiceTests extends ESTestCase {
         int maxSize = 0xFFFF;
         buildScriptService(Settings.EMPTY);
         // allowed
-        scriptService.validate("_id", "test", new BytesArray("{\"script\":\"" + randomAsciiOfLength(maxSize - 13) + "\"}"));
+        scriptService.validateStoredScript("_id", "test", new BytesArray("{\"script\":\"" + randomAsciiOfLength(maxSize - 13) + "\"}"));
 
         // disallowed
         IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
-            scriptService.validate("_id", "test", new BytesArray("{\"script\":\"" + randomAsciiOfLength(maxSize - 12) + "\"}"));
+            scriptService.validateStoredScript("_id", "test", new BytesArray("{\"script\":\"" + randomAsciiOfLength(maxSize - 12) + "\"}"));
         });
         assertThat(e.getMessage(), equalTo(
                 "Limit of script size in bytes [" + maxSize+ "] has been exceeded for script [_id] with size [" + (maxSize + 1) + "]"));
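A note on the arithmetic in the hunk above: the stored-script source is validated as a whole JSON document, and the {"script":""} envelope is exactly 13 characters, so maxSize - 13 is the largest body that still fits the 0xFFFF limit while maxSize - 12 overshoots it by exactly one byte, matching the (maxSize + 1) in the asserted message. A minimal sketch of that bookkeeping (names are illustrative, not from the PR):

    // Verifies the envelope math behind randomAsciiOfLength(maxSize - 13).
    public class ScriptSizeMath {
        public static void main(String[] args) {
            String envelope = "{\"script\":\"\"}";   // wrapper around an empty script body
            int maxSize = 0xFFFF;                     // 65535, the limit under test
            System.out.println(envelope.length());                       // 13
            System.out.println(envelope.length() + (maxSize - 13));      // 65535 -> allowed
            System.out.println(envelope.length() + (maxSize - 12));      // 65536 -> rejected
        }
    }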
diff --git a/core/src/test/java/org/elasticsearch/script/StoredScriptsIT.java b/core/src/test/java/org/elasticsearch/script/StoredScriptsIT.java
index d8d6b0f5409..6ae607e7b8e 100644
--- a/core/src/test/java/org/elasticsearch/script/StoredScriptsIT.java
+++ b/core/src/test/java/org/elasticsearch/script/StoredScriptsIT.java
@@ -24,13 +24,16 @@ import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESIntegTestCase;
 
 import java.util.Collection;
+import java.util.Collections;
+import java.util.Map;
+import java.util.function.Function;
 
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 
 public class StoredScriptsIT extends ESIntegTestCase {
 
-    private final static int SCRIPT_MAX_SIZE_IN_BYTES = 64;
-    private final static String LANG = MockScriptEngine.NAME;
+    private static final int SCRIPT_MAX_SIZE_IN_BYTES = 64;
+    private static final String LANG = MockScriptEngine.NAME;
 
     @Override
     protected Settings nodeSettings(int nodeOrdinal) {
@@ -41,7 +44,7 @@ public class StoredScriptsIT extends ESIntegTestCase {
 
     @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
-        return pluginList(MockScriptEngine.TestPlugin.class);
+        return pluginList(CustomScriptPlugin.class);
     }
 
     public void testBasics() {
@@ -79,4 +82,11 @@ public class StoredScriptsIT extends ESIntegTestCase {
         assertEquals("Limit of script size in bytes [64] has been exceeded for script [foobar] with size [65]",
                 e.getMessage());
     }
+
+    public static class CustomScriptPlugin extends MockScriptPlugin {
+
+        @Override
+        protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
+            return Collections.emptyMap();
+        }
+    }
 }
diff --git a/core/src/test/java/org/elasticsearch/search/DocValueFormatTests.java b/core/src/test/java/org/elasticsearch/search/DocValueFormatTests.java
index 2ca255ea1a3..192f40d4b2b 100644
--- a/core/src/test/java/org/elasticsearch/search/DocValueFormatTests.java
+++ b/core/src/test/java/org/elasticsearch/search/DocValueFormatTests.java
@@ -43,13 +43,13 @@ public class DocValueFormatTests extends ESTestCase {
 
         BytesStreamOutput out = new BytesStreamOutput();
         out.writeNamedWriteable(DocValueFormat.BOOLEAN);
-        StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(out.bytes()), registry);
+        StreamInput in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), registry);
         assertSame(DocValueFormat.BOOLEAN, in.readNamedWriteable(DocValueFormat.class));
 
         DocValueFormat.Decimal decimalFormat = new DocValueFormat.Decimal("###.##");
         out = new BytesStreamOutput();
         out.writeNamedWriteable(decimalFormat);
-        in = new NamedWriteableAwareStreamInput(StreamInput.wrap(out.bytes()), registry);
+        in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), registry);
         DocValueFormat vf = in.readNamedWriteable(DocValueFormat.class);
         assertEquals(DocValueFormat.Decimal.class, vf.getClass());
         assertEquals("###.##", ((DocValueFormat.Decimal) vf).pattern);
@@ -57,7 +57,7 @@ public class DocValueFormatTests extends ESTestCase {
         DocValueFormat.DateTime dateFormat = new DocValueFormat.DateTime(Joda.forPattern("epoch_second"), DateTimeZone.forOffsetHours(1));
         out = new BytesStreamOutput();
         out.writeNamedWriteable(dateFormat);
-        in = new NamedWriteableAwareStreamInput(StreamInput.wrap(out.bytes()), registry);
+        in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), registry);
         vf = in.readNamedWriteable(DocValueFormat.class);
         assertEquals(DocValueFormat.DateTime.class, vf.getClass());
         assertEquals("epoch_second", ((DocValueFormat.DateTime) vf).formatter.format());
@@ -65,17 +65,17 @@ public class DocValueFormatTests extends ESTestCase {
 
         out = new BytesStreamOutput();
         out.writeNamedWriteable(DocValueFormat.GEOHASH);
-        in = new NamedWriteableAwareStreamInput(StreamInput.wrap(out.bytes()), registry);
+        in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), registry);
         assertSame(DocValueFormat.GEOHASH, in.readNamedWriteable(DocValueFormat.class));
 
         out = new BytesStreamOutput();
         out.writeNamedWriteable(DocValueFormat.IP);
-        in = new NamedWriteableAwareStreamInput(StreamInput.wrap(out.bytes()), registry);
+        in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), registry);
         assertSame(DocValueFormat.IP, in.readNamedWriteable(DocValueFormat.class));
 
         out = new BytesStreamOutput();
         out.writeNamedWriteable(DocValueFormat.RAW);
-        in = new NamedWriteableAwareStreamInput(StreamInput.wrap(out.bytes()), registry);
+        in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), registry);
         assertSame(DocValueFormat.RAW, in.readNamedWriteable(DocValueFormat.class));
     }
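The DocValueFormatTests hunks above all make the same mechanical swap, so it is worth spelling out the round-trip idiom once. A minimal method-body sketch, assuming the 5.x stream API in which BytesReference gained a streamInput() accessor that replaces the removed StreamInput.wrap(BytesReference) overload:

    // Serialize into a buffer, then read back through the new accessor.
    try (BytesStreamOutput out = new BytesStreamOutput()) {
        out.writeVInt(42);
        try (StreamInput in = out.bytes().streamInput()) {  // was: StreamInput.wrap(out.bytes())
            assert in.readVInt() == 42;
        }
    }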
diff --git a/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java b/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java
index a4837e382ac..5caba0fb441 100644
--- a/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java
+++ b/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java
@@ -753,35 +753,35 @@ public class MultiValueModeTests extends ESTestCase {
     public void testWriteTo() throws Exception {
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             MultiValueMode.SUM.writeTo(out);
-            try (StreamInput in = StreamInput.wrap(out.bytes())) {
+            try (StreamInput in = out.bytes().streamInput()) {
                 assertThat(in.readVInt(), equalTo(0));
             }
         }
 
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             MultiValueMode.AVG.writeTo(out);
-            try (StreamInput in = StreamInput.wrap(out.bytes())) {
+            try (StreamInput in = out.bytes().streamInput()) {
                 assertThat(in.readVInt(), equalTo(1));
             }
         }
 
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             MultiValueMode.MEDIAN.writeTo(out);
-            try (StreamInput in = StreamInput.wrap(out.bytes())) {
+            try (StreamInput in = out.bytes().streamInput()) {
                 assertThat(in.readVInt(), equalTo(2));
             }
         }
 
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             MultiValueMode.MIN.writeTo(out);
-            try (StreamInput in = StreamInput.wrap(out.bytes())) {
+            try (StreamInput in = out.bytes().streamInput()) {
                 assertThat(in.readVInt(), equalTo(3));
             }
         }
 
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             MultiValueMode.MAX.writeTo(out);
-            try (StreamInput in = StreamInput.wrap(out.bytes())) {
+            try (StreamInput in = out.bytes().streamInput()) {
                 assertThat(in.readVInt(), equalTo(4));
             }
         }
@@ -790,35 +790,35 @@
     public void testReadFrom() throws Exception {
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             out.writeVInt(0);
-            try (StreamInput in = StreamInput.wrap(out.bytes())) {
+            try (StreamInput in = out.bytes().streamInput()) {
                 assertThat(MultiValueMode.readMultiValueModeFrom(in), equalTo(MultiValueMode.SUM));
             }
         }
 
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             out.writeVInt(1);
-            try (StreamInput in = StreamInput.wrap(out.bytes())) {
+            try (StreamInput in = out.bytes().streamInput()) {
                 assertThat(MultiValueMode.readMultiValueModeFrom(in), equalTo(MultiValueMode.AVG));
             }
         }
 
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             out.writeVInt(2);
-            try (StreamInput in = StreamInput.wrap(out.bytes())) {
+            try (StreamInput in = out.bytes().streamInput()) {
                 assertThat(MultiValueMode.readMultiValueModeFrom(in), equalTo(MultiValueMode.MEDIAN));
             }
         }
 
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             out.writeVInt(3);
-            try (StreamInput in = StreamInput.wrap(out.bytes())) {
+            try (StreamInput in = out.bytes().streamInput()) {
                 assertThat(MultiValueMode.readMultiValueModeFrom(in), equalTo(MultiValueMode.MIN));
             }
         }
 
         try (BytesStreamOutput out = new BytesStreamOutput()) {
             out.writeVInt(4);
-            try (StreamInput in = StreamInput.wrap(out.bytes())) {
+            try (StreamInput in = out.bytes().streamInput()) {
                 assertThat(MultiValueMode.readMultiValueModeFrom(in), equalTo(MultiValueMode.MAX));
             }
         }
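The paired testWriteTo/testReadFrom hunks pin each MultiValueMode constant to a fixed wire value (SUM=0, AVG=1, MEDIAN=2, MIN=3, MAX=4), which is why both directions assert against literal VInts instead of only round-tripping through the enum. A sketch of the ordinal-based scheme those literals guard, assuming writeTo simply emits the ordinal:

    // Hypothetical mirror of the serialization under test: the wire value is
    // the declaration order, so reordering or inserting constants would
    // silently change what older nodes read back.
    enum Mode { SUM, AVG, MEDIAN, MIN, MAX }

    static void writeTo(Mode mode, StreamOutput out) throws IOException {
        out.writeVInt(mode.ordinal());
    }

    static Mode readFrom(StreamInput in) throws IOException {
        return Mode.values()[in.readVInt()];
    }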
diff --git a/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java b/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java
index 72873bc0d48..839a42aa091 100644
--- a/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java
+++ b/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java
@@ -27,75 +27,138 @@
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.QueryParser;
 import org.elasticsearch.index.query.TermQueryBuilder;
+import org.elasticsearch.index.query.functionscore.GaussDecayFunctionBuilder;
 import org.elasticsearch.indices.query.IndicesQueriesRegistry;
+import org.elasticsearch.plugins.SearchPlugin;
+import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ChiSquare;
+import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
+import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser;
+import org.elasticsearch.search.aggregations.pipeline.movavg.models.MovAvgModel;
+import org.elasticsearch.search.aggregations.pipeline.movavg.models.SimpleModel;
+import org.elasticsearch.search.fetch.FetchSubPhase;
+import org.elasticsearch.search.fetch.explain.ExplainFetchSubPhase;
 import org.elasticsearch.search.highlight.CustomHighlighter;
 import org.elasticsearch.search.highlight.FastVectorHighlighter;
+import org.elasticsearch.search.highlight.Highlighter;
 import org.elasticsearch.search.highlight.PlainHighlighter;
 import org.elasticsearch.search.highlight.PostingsHighlighter;
 import org.elasticsearch.search.suggest.CustomSuggester;
+import org.elasticsearch.search.suggest.Suggester;
+import org.elasticsearch.search.suggest.completion.CompletionSuggester;
 import org.elasticsearch.search.suggest.phrase.PhraseSuggester;
+import org.elasticsearch.search.suggest.term.TermSuggester;
 
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+import java.util.Map;
 
+import static java.util.Collections.emptyList;
+import static java.util.Collections.singletonList;
+import static java.util.Collections.singletonMap;
 import static org.hamcrest.Matchers.containsInAnyOrder;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.notNullValue;
 
 public class SearchModuleTests extends ModuleTestCase {
 
-   public void testDoubleRegister() {
-       SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry());
-       try {
-           module.registerHighlighter("fvh", new PlainHighlighter());
-       } catch (IllegalArgumentException e) {
-           assertEquals(e.getMessage(), "Can't register the same [highlighter] more than once for [fvh]");
-       }
+    public void testDoubleRegister() {
+        SearchPlugin registersDupeHighlighter = new SearchPlugin() {
+            @Override
+            public Map<String, Highlighter> getHighlighters() {
+                return singletonMap("plain", new PlainHighlighter());
+            }
+        };
+        expectThrows(IllegalArgumentException.class,
+                () -> new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false, singletonList(registersDupeHighlighter)));
 
-       try {
-           module.registerSuggester("term", PhraseSuggester.INSTANCE);
-       } catch (IllegalArgumentException e) {
-           assertEquals(e.getMessage(), "Can't register the same [suggester] more than once for [term]");
-       }
-   }
+        SearchPlugin registersDupeSuggester = new SearchPlugin() {
+            @Override
+            public Map<String, Suggester<?>> getSuggesters() {
+                return singletonMap("term", TermSuggester.INSTANCE);
+            }
+        };
+        expectThrows(IllegalArgumentException.class,
+                () -> new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false, singletonList(registersDupeSuggester)));
+
+        SearchPlugin registersDupeScoreFunction = new SearchPlugin() {
+            @Override
+            public List<ScoreFunctionSpec<?>> getScoreFunctions() {
+                return singletonList(new ScoreFunctionSpec<>(GaussDecayFunctionBuilder.NAME, GaussDecayFunctionBuilder::new,
+                        GaussDecayFunctionBuilder.PARSER));
+            }
+        };
+        expectThrows(IllegalArgumentException.class,
+                () -> new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false, singletonList(registersDupeScoreFunction)));
+
+        SearchPlugin registersDupeSignificanceHeuristic = new SearchPlugin() {
+            @Override
+            public List<SearchExtensionSpec<SignificanceHeuristic, SignificanceHeuristicParser>> getSignificanceHeuristics() {
+                return singletonList(new SearchExtensionSpec<>(ChiSquare.NAME, ChiSquare::new, ChiSquare.PARSER));
+            }
+        };
+        expectThrows(IllegalArgumentException.class, () -> new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false,
+                singletonList(registersDupeSignificanceHeuristic)));
+
+        SearchPlugin registersDupeMovAvgModel = new SearchPlugin() {
+            @Override
+            public List<SearchExtensionSpec<MovAvgModel, MovAvgModel.AbstractModelParser>> getMovingAverageModels() {
+                return singletonList(new SearchExtensionSpec<>(SimpleModel.NAME, SimpleModel::new, SimpleModel.PARSER));
+            }
+        };
+        expectThrows(IllegalArgumentException.class, () -> new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false,
+                singletonList(registersDupeMovAvgModel)));
+
+        SearchPlugin registersDupeFetchSubPhase = new SearchPlugin() {
+            @Override
+            public List<FetchSubPhase> getFetchSubPhases(FetchPhaseConstructionContext context) {
+                return singletonList(new ExplainFetchSubPhase());
+            }
+        };
+        expectThrows(IllegalArgumentException.class, () -> new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false,
+                singletonList(registersDupeFetchSubPhase)));
+
+        SearchPlugin registersDupeFetchQuery = new SearchPlugin() {
+            public List<QuerySpec<?>> getQueries() {
+                return singletonList(new QuerySpec<>(TermQueryBuilder.NAME, TermQueryBuilder::new, TermQueryBuilder::fromXContent));
+            }
+        };
+        expectThrows(IllegalArgumentException.class, () -> new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false,
+                singletonList(registersDupeFetchQuery)));
+    }
 
     public void testRegisterSuggester() {
-        SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry());
-        module.registerSuggester("custom", CustomSuggester.INSTANCE);
-        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
-            () -> module.registerSuggester("custom", CustomSuggester.INSTANCE));
-        assertEquals("Can't register the same [suggester] more than once for [custom]", e.getMessage());
+        SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false, singletonList(new SearchPlugin() {
+            @Override
+            public Map<String, Suggester<?>> getSuggesters() {
+                return singletonMap("custom", CustomSuggester.INSTANCE);
+            }
+        }));
+        assertSame(TermSuggester.INSTANCE, module.getSuggesters().getSuggester("term"));
+        assertSame(PhraseSuggester.INSTANCE, module.getSuggesters().getSuggester("phrase"));
+        assertSame(CompletionSuggester.INSTANCE, module.getSuggesters().getSuggester("completion"));
+        assertSame(CustomSuggester.INSTANCE, module.getSuggesters().getSuggester("custom"));
     }
 
     public void testRegisterHighlighter() {
-        SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry());
         CustomHighlighter customHighlighter = new CustomHighlighter();
-        module.registerHighlighter("custom", customHighlighter);
-        IllegalArgumentException exception = expectThrows(IllegalArgumentException.class,
-            () -> module.registerHighlighter("custom", new CustomHighlighter()));
-        assertEquals("Can't register the same [highlighter] more than once for [custom]", exception.getMessage());
+        SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false, singletonList(new SearchPlugin() {
+            @Override
+            public Map<String, Highlighter> getHighlighters() {
+                return singletonMap("custom", customHighlighter);
+            }
+        }));
 
-        exception = expectThrows(IllegalArgumentException.class,
-            () -> module.registerHighlighter("custom", null));
-        assertEquals("Can't register null highlighter for key: [custom]", exception.getMessage());
-        Highlighters highlighters = module.getHighlighters();
-        assertEquals(highlighters.get("fvh").getClass(), FastVectorHighlighter.class);
-        assertEquals(highlighters.get("plain").getClass(), PlainHighlighter.class);
-        assertEquals(highlighters.get("postings").getClass(), PostingsHighlighter.class);
+        Map<String, Highlighter> highlighters = module.getHighlighters();
+        assertEquals(FastVectorHighlighter.class, highlighters.get("fvh").getClass());
+        assertEquals(PlainHighlighter.class, highlighters.get("plain").getClass());
+        assertEquals(PostingsHighlighter.class, highlighters.get("postings").getClass());
         assertSame(highlighters.get("custom"), customHighlighter);
     }
 
-    public void testRegisterQueryParserDuplicate() {
-        SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry());
-        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> module
-                .registerQuery(TermQueryBuilder::new, TermQueryBuilder::fromXContent, TermQueryBuilder.QUERY_NAME_FIELD));
-        assertThat(e.getMessage(), containsString("] already registered for [query][term] while trying to register [org.elasticsearch."));
-    }
-
     public void testRegisteredQueries() throws IOException {
-        SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry());
+        SearchModule module = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false, emptyList());
         List<String> allSupportedQueries = new ArrayList<>();
         Collections.addAll(allSupportedQueries, NON_DEPRECATED_QUERIES);
         Collections.addAll(allSupportedQueries, DEPRECATED_QUERIES);
@@ -165,7 +228,6 @@
             "span_or",
             "span_term",
             "span_within",
-            "template",
             "term",
             "terms",
             "type",
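The rewritten SearchModuleTests above capture the shape of the new extension point: instead of imperative register* calls on SearchModule, a plugin implements SearchPlugin and the module pulls extensions from it at construction time, failing fast on duplicate keys. A condensed sketch of a plugin using that surface (the class name is illustrative, and singletonMap is assumed statically imported):

    public class MyHighlightPlugin extends Plugin implements SearchPlugin {
        @Override
        public Map<String, Highlighter> getHighlighters() {
            // "custom" must not collide with a built-in key such as "plain",
            // or SearchModule's constructor throws IllegalArgumentException.
            return singletonMap("custom", new CustomHighlighter());
        }
    }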
diff --git a/core/src/test/java/org/elasticsearch/search/SearchServiceTests.java b/core/src/test/java/org/elasticsearch/search/SearchServiceTests.java
index 1c63ac7b62a..fba71499cc2 100644
--- a/core/src/test/java/org/elasticsearch/search/SearchServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/search/SearchServiceTests.java
@@ -22,7 +22,6 @@ package org.elasticsearch.search;
 import org.apache.lucene.search.Query;
 import org.elasticsearch.action.search.SearchPhaseExecutionException;
 import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -34,12 +33,15 @@ import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.index.shard.IndexShard;
 import org.elasticsearch.indices.IndicesService;
 import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.plugins.SearchPlugin;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 
 import java.io.IOException;
 import java.util.Collection;
+import java.util.List;
 import java.util.concurrent.ExecutionException;
 
+import static java.util.Collections.singletonList;
 import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.hamcrest.Matchers.is;
@@ -110,12 +112,12 @@ public class SearchServiceTests extends ESSingleNodeTestCase {
         assertEquals(activeRefs, indexShard.store().refCount());
     }
 
-    public static class FailOnRewriteQueryPlugin extends Plugin {
-
-        public void onModule(SearchModule module) {
-            module.registerQuery(FailOnRewriteQueryBuilder::new, parseContext -> {
+    public static class FailOnRewriteQueryPlugin extends Plugin implements SearchPlugin {
+        @Override
+        public List<QuerySpec<?>> getQueries() {
+            return singletonList(new QuerySpec<>("fail_on_rewrite_query", FailOnRewriteQueryBuilder::new, parseContext -> {
                 throw new UnsupportedOperationException("No query parser for this plugin");
-            }, new ParseField("fail_on_rewrite_query"));
+            }));
         }
     }
diff --git a/core/src/test/java/org/elasticsearch/search/SearchWithRejectionsIT.java b/core/src/test/java/org/elasticsearch/search/SearchWithRejectionsIT.java
index 2bb39ad10ea..6542bad5b8a 100644
--- a/core/src/test/java/org/elasticsearch/search/SearchWithRejectionsIT.java
+++ b/core/src/test/java/org/elasticsearch/search/SearchWithRejectionsIT.java
@@ -65,7 +65,7 @@ public class SearchWithRejectionsIT extends ESIntegTestCase {
         for (int i = 0; i < numSearches; i++) {
             try {
                 responses[i].get();
-            } catch (Throwable t) {
+            } catch (Exception t) {
             }
         }
         awaitBusy(() -> client().admin().indices().prepareStats().execute().actionGet().getTotal().getSearch().getOpenContexts() == 0, 1, TimeUnit.SECONDS);
diff --git a/core/src/test/java/org/elasticsearch/search/StressSearchServiceReaperIT.java b/core/src/test/java/org/elasticsearch/search/StressSearchServiceReaperIT.java
index 9ea5ec93f1f..95d873b3e7a 100644
--- a/core/src/test/java/org/elasticsearch/search/StressSearchServiceReaperIT.java
+++ b/core/src/test/java/org/elasticsearch/search/StressSearchServiceReaperIT.java
@@ -52,7 +52,6 @@ public class StressSearchServiceReaperIT extends ESIntegTestCase {
         }
         createIndex("test");
         indexRandom(true, builders);
-        ensureYellow();
         final int iterations = scaledRandomIntBetween(500, 1000);
         for (int i = 0; i < iterations; i++) {
             SearchResponse searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()).setSize(num).get();
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/AggregationTestScriptsPlugin.java b/core/src/test/java/org/elasticsearch/search/aggregations/AggregationTestScriptsPlugin.java
new file mode 100644
index 00000000000..4a0369a4019
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/AggregationTestScriptsPlugin.java
@@ -0,0 +1,101 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.aggregations;
+
+import org.elasticsearch.index.fielddata.ScriptDocValues;
+import org.elasticsearch.script.MockScriptPlugin;
+import org.elasticsearch.script.Script;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.function.Function;
+
+import static java.util.Collections.singletonMap;
+import static org.elasticsearch.script.ScriptService.ScriptType;
+
+/**
+ * This class contains various mocked scripts that are used in aggregations integration tests.
+ */
+public class AggregationTestScriptsPlugin extends MockScriptPlugin {
+
+    // Equivalent to:
+    //
+    // List values = doc['values'].values;
+    // double[] res = new double[values.size()];
+    // for (int i = 0; i < res.length; i++) {
+    //     res[i] = values.get(i) - dec;
+    // };
+    // return res;
+    public static final Script DECREMENT_ALL_VALUES = new Script("decrement all values", ScriptType.INLINE, NAME, singletonMap("dec", 1));
+
+    @Override
+    protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
+        Map<String, Function<Map<String, Object>, Object>> scripts = new HashMap<>();
+
+        scripts.put("20 - _value", vars -> {
+            double value = (double) vars.get("_value");
+            return 20.0d - value;
+        });
+
+        scripts.put("_value - 1", vars -> {
+            double value = (double) vars.get("_value");
+            return value - 1.0d;
+        });
+
+        scripts.put("_value - dec", vars -> {
+            double value = (double) vars.get("_value");
+            int dec = (int) vars.get("dec");
+            return value - dec;
+        });
+
+        scripts.put("doc['value'].value", vars -> {
+            Map<?, ?> doc = (Map) vars.get("doc");
+            return doc.get("value");
+        });
+
+        scripts.put("doc['value'].value - dec", vars -> {
+            int dec = (int) vars.get("dec");
+            Map<?, ?> doc = (Map) vars.get("doc");
+            ScriptDocValues.Longs value = (ScriptDocValues.Longs) doc.get("value");
+            return value.getValue() - dec;
+        });
+
+        scripts.put("doc['values'].values", vars -> {
+            Map<?, ?> doc = (Map) vars.get("doc");
+            return doc.get("values");
+        });
+
+        scripts.put(DECREMENT_ALL_VALUES.getScript(), vars -> {
+            int dec = (int) vars.get("dec");
+            Map<?, ?> doc = (Map) vars.get("doc");
+            ScriptDocValues.Longs values = (ScriptDocValues.Longs) doc.get("values");
+
+            double[] res = new double[values.size()];
+            for (int i = 0; i < res.length; i++) {
+                res[i] = values.get(i) - dec;
+            }
+            return res;
+        });
+
+        scripts.put("_value * -1", vars -> (double) vars.get("_value") * -1);
+
+        return scripts;
+    }
+}
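The map keys in pluginScripts() double as the inline script source, so a test selects one of these implementations by constructing a Script whose source text matches a key exactly; DECREMENT_ALL_VALUES above is the one pre-built example. A hypothetical lookup for another entry, following the same constructor shape:

    // Resolves to the "doc['value'].value - dec" mock above; the params map
    // supplies the "dec" variable that the lambda reads out of vars.
    Script script = new Script("doc['value'].value - dec", ScriptType.INLINE,
            AggregationTestScriptsPlugin.NAME, singletonMap("dec", 1));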
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/AggregationsIntegrationIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/AggregationsIntegrationIT.java index c7384c5aa2d..c8eb475c968 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/AggregationsIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/AggregationsIntegrationIT.java @@ -41,7 +41,6 @@ public class AggregationsIntegrationIT extends ESIntegTestCase { @Override public void setupSuiteScopeCluster() throws Exception { assertAcked(prepareCreate("index").addMapping("type", "f", "type=keyword").get()); - ensureYellow("index"); numDocs = randomIntBetween(1, 20); List<IndexRequestBuilder> docs = new ArrayList<>(); for (int i = 0; i < numDocs; ++i) { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorParsingTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorParsingTests.java index 1a21069623d..2adaf408ed1 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorParsingTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorParsingTests.java @@ -62,6 +62,7 @@ import java.util.Random; import java.util.regex.Matcher; import java.util.regex.Pattern; +import static java.util.Collections.emptyList; import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; import static org.elasticsearch.test.ClusterServiceUtils.setState; import static org.hamcrest.Matchers.containsString; @@ -119,7 +120,7 @@ public class AggregatorParsingTests extends ESTestCase { protected void configure() { bindMapperExtension(); } - }, new SearchModule(settings, namedWriteableRegistry) { + }, new SearchModule(settings, namedWriteableRegistry, false, emptyList()) { @Override protected void configureSearch() { // Skip me diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java index 4e0429fca87..9499603cf3a 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java @@ -63,6 +63,7 @@ import java.io.IOException; import java.util.Collections; import java.util.List; +import static java.util.Collections.emptyList; import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; import static org.elasticsearch.test.ClusterServiceUtils.setState; import static org.hamcrest.Matchers.equalTo; @@ -151,7 +152,7 @@ public abstract class BaseAggregationTestCase protected Collection<Class<? extends Plugin>> nodePlugins() { - return Collections.singleton(GroovyPlugin.class); + return Collections.singleton(CustomScriptPlugin.class); + } + + public static class CustomScriptPlugin extends MockScriptPlugin { + @Override + protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() { + return Collections.singletonMap("floor(_value / interval)", vars -> { + Double value = (Double) vars.get("_value"); + Integer interval = (Integer) vars.get("interval"); + return Math.floor(value / interval.doubleValue()); + }); + } } // Make sure that unordered, reversed, disjoint and/or overlapping ranges are supported @@ -102,7 +114,9 @@ public class EquivalenceTests extends ESIntegTestCase { source = source.endArray().endObject(); client().prepareIndex("idx", "type").setSource(source).execute().actionGet(); } - assertNoFailures(client().admin().indices().prepareRefresh("idx").setIndicesOptions(IndicesOptions.lenientExpandOpen()).execute().get()); + assertNoFailures(client().admin().indices().prepareRefresh("idx").
+ setIndicesOptions(IndicesOptions.lenientExpandOpen()) + .get()); final int numRanges = randomIntBetween(1, 20); final double[][] ranges = new double[numRanges][]; @@ -234,20 +248,48 @@ public class EquivalenceTests extends ESIntegTestCase { } indexRandom(true, indexingRequests); - assertNoFailures(client().admin().indices().prepareRefresh("idx").setIndicesOptions(IndicesOptions.lenientExpandOpen()).execute().get()); + assertNoFailures(client().admin().indices().prepareRefresh("idx") + .setIndicesOptions(IndicesOptions.lenientExpandOpen()) + .execute().get()); TermsAggregatorFactory.ExecutionMode[] globalOrdinalModes = new TermsAggregatorFactory.ExecutionMode[] { TermsAggregatorFactory.ExecutionMode.GLOBAL_ORDINALS_HASH, TermsAggregatorFactory.ExecutionMode.GLOBAL_ORDINALS }; SearchResponse resp = client().prepareSearch("idx") - .addAggregation(terms("long").field("long_values").size(maxNumTerms).collectMode(randomFrom(SubAggCollectionMode.values())).subAggregation(min("min").field("num"))) - .addAggregation(terms("double").field("double_values").size(maxNumTerms).collectMode(randomFrom(SubAggCollectionMode.values())).subAggregation(max("max").field("num"))) - .addAggregation(terms("string_map").field("string_values").collectMode(randomFrom(SubAggCollectionMode.values())) - .executionHint(TermsAggregatorFactory.ExecutionMode.MAP.toString()).size(maxNumTerms) - .subAggregation(stats("stats").field("num"))) - .addAggregation(terms("string_global_ordinals").field("string_values").collectMode(randomFrom(SubAggCollectionMode.values())).executionHint(globalOrdinalModes[randomInt(globalOrdinalModes.length - 1)].toString()).size(maxNumTerms).subAggregation(extendedStats("stats").field("num"))) - .addAggregation(terms("string_global_ordinals_doc_values").field("string_values.doc_values").collectMode(randomFrom(SubAggCollectionMode.values())).executionHint(globalOrdinalModes[randomInt(globalOrdinalModes.length - 1)].toString()).size(maxNumTerms).subAggregation(extendedStats("stats").field("num"))) + .addAggregation( + terms("long") + .field("long_values") + .size(maxNumTerms) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(min("min").field("num"))) + .addAggregation( + terms("double") + .field("double_values") + .size(maxNumTerms) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(max("max").field("num"))) + .addAggregation( + terms("string_map") + .field("string_values") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .executionHint(TermsAggregatorFactory.ExecutionMode.MAP.toString()) + .size(maxNumTerms) + .subAggregation(stats("stats").field("num"))) + .addAggregation( + terms("string_global_ordinals") + .field("string_values") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .executionHint(globalOrdinalModes[randomInt(globalOrdinalModes.length - 1)].toString()) + .size(maxNumTerms) + .subAggregation(extendedStats("stats").field("num"))) + .addAggregation( + terms("string_global_ordinals_doc_values") + .field("string_values.doc_values") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .executionHint(globalOrdinalModes[randomInt(globalOrdinalModes.length - 1)].toString()) + .size(maxNumTerms) + .subAggregation(extendedStats("stats").field("num"))) .execute().actionGet(); assertAllSuccessful(resp); assertEquals(numDocs, resp.getHits().getTotalHits()); @@ -304,15 +346,25 @@ public class EquivalenceTests extends ESIntegTestCase { source = source.endArray().endObject(); client().prepareIndex("idx", 
"type").setSource(source).execute().actionGet(); } - assertNoFailures(client().admin().indices().prepareRefresh("idx").setIndicesOptions(IndicesOptions.lenientExpandOpen()).execute().get()); + assertNoFailures(client().admin().indices().prepareRefresh("idx") + .setIndicesOptions(IndicesOptions.lenientExpandOpen()) + .execute().get()); Map params = new HashMap<>(); params.put("interval", interval); + SearchResponse resp = client().prepareSearch("idx") .addAggregation( - terms("terms").field("values").collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script("floor(_value / interval)", ScriptType.INLINE, null, params)).size(maxNumTerms)) - .addAggregation(histogram("histo").field("values").interval(interval).minDocCount(1)) + terms("terms") + .field("values") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .script(new Script("floor(_value / interval)", ScriptType.INLINE, CustomScriptPlugin.NAME, params)) + .size(maxNumTerms)) + .addAggregation( + histogram("histo") + .field("values") + .interval(interval) + .minDocCount(1)) .execute().actionGet(); assertSearchResponse(resp); @@ -341,7 +393,13 @@ public class EquivalenceTests extends ESIntegTestCase { } indexRandom(true, indexingRequests); - SearchResponse response = client().prepareSearch("idx").addAggregation(terms("terms").field("double_value").collectMode(randomFrom(SubAggCollectionMode.values())).subAggregation(percentiles("pcts").field("double_value"))).execute().actionGet(); + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + terms("terms") + .field("double_value") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(percentiles("pcts").field("double_value"))) + .execute().actionGet(); assertAllSuccessful(response); assertEquals(numDocs, response.getHits().getTotalHits()); } @@ -351,7 +409,6 @@ public class EquivalenceTests extends ESIntegTestCase { createIndex("idx"); final int value = randomIntBetween(0, 10); indexRandom(true, client().prepareIndex("idx", "type").setSource("f", value)); - ensureYellow("idx"); // only one document let's make sure all shards have an active primary SearchResponse response = client().prepareSearch("idx") .addAggregation(filter("filter", QueryBuilders.matchAllQuery()) .subAggregation(range("range") diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/SubAggCollectionModeTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/SubAggCollectionModeTests.java index 5ed3fe39970..9e20d313848 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/SubAggCollectionModeTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/SubAggCollectionModeTests.java @@ -39,14 +39,14 @@ public class SubAggCollectionModeTests extends ESTestCase { public void testwriteTo() throws Exception { try (BytesStreamOutput out = new BytesStreamOutput()) { SubAggCollectionMode.DEPTH_FIRST.writeTo(out); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(in.readVInt(), equalTo(0)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { SubAggCollectionMode.BREADTH_FIRST.writeTo(out); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(in.readVInt(), equalTo(1)); } } @@ -55,13 +55,13 @@ public class SubAggCollectionModeTests extends ESTestCase { public void testReadFrom() throws Exception { try (BytesStreamOutput out = new BytesStreamOutput()) { 
out.writeVInt(0); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(SubAggCollectionMode.readFromStream(in), equalTo(SubAggCollectionMode.DEPTH_FIRST)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(1); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(SubAggCollectionMode.readFromStream(in), equalTo(SubAggCollectionMode.BREADTH_FIRST)); } } @@ -70,7 +70,7 @@ public class SubAggCollectionModeTests extends ESTestCase { public void testInvalidReadFrom() throws Exception { try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(randomIntBetween(2, Integer.MAX_VALUE)); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { SubAggCollectionMode.readFromStream(in); fail("Expected IOException"); } catch(IOException e) { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenIT.java index b22f7524e32..f4234423531 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ChildrenIT.java @@ -61,7 +61,7 @@ import static org.hamcrest.Matchers.sameInstance; @ESIntegTestCase.SuiteScopeTestCase public class ChildrenIT extends ESIntegTestCase { - private final static Map<String, Control> categoryToControl = new HashMap<>(); + private static final Map<String, Control> categoryToControl = new HashMap<>(); @Override public void setupSuiteScopeCluster() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramTests.java index 74ea18cc1d1..ab196632a20 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.search.aggregations.BaseAggregationTestCase; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds; +import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBoundsTests; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Order; public class DateHistogramTests extends BaseAggregationTestCase<DateHistogramAggregationBuilder> { @@ -62,9 +63,7 @@ public class DateHistogramTests extends BaseAggregationTestCase protected Collection<Class<? extends Plugin>> nodePlugins() { - return Collections.singleton(GroovyPlugin.class); + return Collections.singleton(CustomScriptPlugin.class); + } + + public static class CustomScriptPlugin extends MockScriptPlugin { + + @Override + protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() { + Map<String, Function<Map<String, Object>, Object>> scripts = new HashMap<>(); + + scripts.put("_value + 1", vars -> { + long value = (long) vars.get("_value"); + return value + 1L; + }); + + scripts.put("doc['l_value'].value", vars -> { + Map<?, ?> doc = (Map) vars.get("doc"); + return doc.get(SINGLE_VALUED_FIELD_NAME); + }); + + scripts.put("doc['l_values']", vars -> { + Map<?, ?> doc = (Map) vars.get("doc"); + return doc.get(MULTI_VALUED_FIELD_NAME); + }); + + return scripts; + } } @Override @@ -349,15 +377,19 @@ public class HistogramTests extends ESIntegTestCase
{ } } assertThat(sum.getValue(), equalTo((double) s)); - assertThat((long) propertiesKeys[i], equalTo((long) i * interval)); - assertThat((long) propertiesDocCounts[i], equalTo(valueCounts[i])); - assertThat((double) propertiesCounts[i], equalTo((double) s)); + assertEquals(propertiesKeys[i], (long) i * interval); + assertThat(propertiesDocCounts[i], equalTo(valueCounts[i])); + assertThat(propertiesCounts[i], equalTo((double) s)); } } public void testSingleValuedFieldOrderedBySubAggregationAsc() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.aggregation("sum", true)) + .addAggregation( + histogram("histo") + .field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .order(Histogram.Order.aggregation("sum", true)) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) .execute().actionGet(); @@ -397,7 +429,11 @@ public class HistogramTests extends ESIntegTestCase { public void testSingleValuedFieldOrderedBySubAggregationDesc() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.aggregation("sum", false)) + .addAggregation( + histogram("histo") + .field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .order(Histogram.Order.aggregation("sum", false)) .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) .execute().actionGet(); @@ -437,7 +473,11 @@ public class HistogramTests extends ESIntegTestCase { public void testSingleValuedFieldOrderedByMultiValuedSubAggregationDesc() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.aggregation("stats.sum", false)) + .addAggregation( + histogram("histo") + .field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .order(Histogram.Order.aggregation("stats.sum", false)) .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME))) .execute().actionGet(); @@ -478,9 +518,13 @@ public class HistogramTests extends ESIntegTestCase { public void testSingleValuedFieldOrderedBySubAggregationDescDeepOrderPath() throws Exception { boolean asc = randomBoolean(); SearchResponse response = client().prepareSearch("idx") - .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.aggregation("filter>max", asc)) + .addAggregation( + histogram("histo") + .field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .order(Histogram.Order.aggregation("filter>max", asc)) .subAggregation(filter("filter", matchAllQuery()) - .subAggregation(max("max").field(SINGLE_VALUED_FIELD_NAME)))) + .subAggregation(max("max").field(SINGLE_VALUED_FIELD_NAME)))) .execute().actionGet(); assertSearchResponse(response); @@ -515,7 +559,11 @@ public class HistogramTests extends ESIntegTestCase { public void testSingleValuedFieldWithValueScript() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).script(new Script("_value + 1")).interval(interval)) + .addAggregation( + histogram("histo") + .field(SINGLE_VALUED_FIELD_NAME) + .script(new Script("_value + 1", ScriptType.INLINE, CustomScriptPlugin.NAME, emptyMap())) + .interval(interval)) .execute().actionGet(); assertSearchResponse(response); @@ -588,7 +636,11 @@ public class HistogramTests extends 
ESIntegTestCase { public void testMultiValuedFieldWithValueScript() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(histogram("histo").field(MULTI_VALUED_FIELD_NAME).script(new Script("_value + 1")).interval(interval)) + .addAggregation( + histogram("histo") + .field(MULTI_VALUED_FIELD_NAME) + .script(new Script("_value + 1", ScriptType.INLINE, CustomScriptPlugin.NAME, emptyMap())) + .interval(interval)) .execute().actionGet(); assertSearchResponse(response); @@ -621,7 +673,10 @@ public class HistogramTests extends ESIntegTestCase { public void testScriptSingleValue() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(histogram("histo").script(new Script("doc['" + SINGLE_VALUED_FIELD_NAME + "'].value")).interval(interval)) + .addAggregation( + histogram("histo") + .script(new Script("doc['l_value'].value", ScriptType.INLINE, CustomScriptPlugin.NAME, emptyMap())) + .interval(interval)) .execute().actionGet(); assertSearchResponse(response); @@ -642,7 +697,10 @@ public class HistogramTests extends ESIntegTestCase { public void testScriptMultiValued() throws Exception { SearchResponse response = client().prepareSearch("idx") - .addAggregation(histogram("histo").script(new Script("doc['" + MULTI_VALUED_FIELD_NAME + "']")).interval(interval)) + .addAggregation( + histogram("histo") + .script(new Script("doc['l_values']", ScriptType.INLINE, CustomScriptPlugin.NAME, emptyMap())) + .interval(interval)) .execute().actionGet(); assertSearchResponse(response); @@ -701,8 +759,11 @@ public class HistogramTests extends ESIntegTestCase { SearchResponse response = client() .prepareSearch("idx", "idx_unmapped") .addAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) - .extendedBounds(new ExtendedBounds((long) -1 * 2 * interval, (long) valueCounts.length * interval))).execute().actionGet(); + histogram("histo") + .field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .extendedBounds(new ExtendedBounds((long) -1 * 2 * interval, (long) valueCounts.length * interval))) + .get(); assertSearchResponse(response); @@ -858,7 +919,7 @@ public class HistogramTests extends ESIntegTestCase { // constructing the newly expected bucket list int bucketsCount = (int) ((boundsMaxKey - boundsMinKey) / interval) + 1; - long[] extendedValueCounts = new long[bucketsCount]; + long[] extendedValueCounts = new long[valueCounts.length + addedBucketsLeft + addedBucketsRight]; System.arraycopy(valueCounts, 0, extendedValueCounts, addedBucketsLeft, valueCounts.length); SearchResponse response = null; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramTests.java index ac0d6d0df8b..54d443b3874 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.search.aggregations.bucket; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds; +import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBoundsTests; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Order; import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; @@ -32,9 +33,7 @@ public class HistogramTests extends 
BaseAggregationTestCase topWords = new HashMap<>(); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java index 34d55ee749a..1027a1097a1 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java @@ -20,7 +20,6 @@ package org.elasticsearch.search.aggregations.bucket; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -31,11 +30,10 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.ScriptPlugin; +import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.script.NativeScriptFactory; import org.elasticsearch.script.Script; -import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService.ScriptType; -import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter; import org.elasticsearch.search.aggregations.bucket.script.NativeSignificanceScoreScriptNoParams; @@ -47,6 +45,7 @@ import org.elasticsearch.search.aggregations.bucket.significant.heuristics.GND; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.MutualInformation; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ScriptHeuristic; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic; +import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser; import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.test.ESIntegTestCase; @@ -62,6 +61,7 @@ import java.util.List; import java.util.Map; import java.util.concurrent.ExecutionException; +import static java.util.Collections.singletonList; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.search.aggregations.AggregationBuilders.filter; @@ -167,20 +167,22 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase { } } - public static class CustomSignificanceHeuristicPlugin extends Plugin implements ScriptPlugin { - - public void onModule(SearchModule searchModule) { - searchModule.registerSignificanceHeuristic(SimpleHeuristic.NAMES_FIELD, SimpleHeuristic::new, SimpleHeuristic::parse); + public static class CustomSignificanceHeuristicPlugin extends Plugin implements ScriptPlugin, SearchPlugin { + @Override + public List<SearchExtensionSpec<SignificanceHeuristic, SignificanceHeuristicParser>> getSignificanceHeuristics() { + return singletonList(new SearchExtensionSpec<>(SimpleHeuristic.NAME, + SimpleHeuristic::new, SimpleHeuristic::parse)); } @Override public List<NativeScriptFactory> getNativeScripts() { - return Arrays.asList(new NativeSignificanceScoreScriptNoParams.Factory(), new NativeSignificanceScoreScriptWithParams.Factory()); + return Arrays.asList(new NativeSignificanceScoreScriptNoParams.Factory(), + new NativeSignificanceScoreScriptWithParams.Factory()); } }
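With this change a test plugin no longer registers heuristics through an onModule(SearchModule) hook; it implements SearchPlugin and returns a SearchExtensionSpec tying together the name, the stream reader, and the parser. A condensed sketch of the pattern using a hypothetical MyHeuristic with the same constructor and parse shape as SimpleHeuristic below:

    // MyHeuristic is hypothetical; only the registration shape matters here.
    public class MyHeuristicPlugin extends Plugin implements SearchPlugin {
        @Override
        public List<SearchExtensionSpec<SignificanceHeuristic, SignificanceHeuristicParser>> getSignificanceHeuristics() {
            return singletonList(new SearchExtensionSpec<>("my_heuristic", MyHeuristic::new, MyHeuristic::parse));
        }
    }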
public static class SimpleHeuristic extends SignificanceHeuristic { - public static final ParseField NAMES_FIELD = new ParseField("simple"); + public static final String NAME = "simple"; public SimpleHeuristic() { } @@ -199,12 +201,12 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase { @Override public String getWriteableName() { - return NAMES_FIELD.getPreferredName(); + return NAME; } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(NAMES_FIELD.getPreferredName()).endObject(); + builder.startObject(NAME).endObject(); return builder; } @@ -449,7 +451,6 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase { public void testScriptScore() throws ExecutionException, InterruptedException, IOException { indexRandomFrequencies01(randomBoolean() ? "text" : "long"); ScriptHeuristic scriptHeuristic = getScriptSignificanceHeuristic(); - ensureYellow(); SearchResponse response = client().prepareSearch(INDEX_NAME) .addAggregation(terms("class").field(CLASS_FIELD) .subAggregation(significantTerms("mySignificantTerms") diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsShardMinDocCountIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsShardMinDocCountIT.java index dad55a20828..2ea319daa9c 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsShardMinDocCountIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsShardMinDocCountIT.java @@ -59,7 +59,6 @@ public class TermsShardMinDocCountIT extends ESIntegTestCase { } assertAcked(prepareCreate(index).setSettings(SETTING_NUMBER_OF_SHARDS, 1, SETTING_NUMBER_OF_REPLICAS, 0) .addMapping(type, "text", textMappings)); - ensureYellow(index); List<IndexRequestBuilder> indexBuilders = new ArrayList<>(); addTermsDocs("1", 1, 0, indexBuilders);//high score but low doc freq @@ -119,7 +118,6 @@ public class TermsShardMinDocCountIT extends ESIntegTestCase { termMappings += ",fielddata=true"; } assertAcked(prepareCreate(index).setSettings(SETTING_NUMBER_OF_SHARDS, 1, SETTING_NUMBER_OF_REPLICAS, 0).addMapping(type, "text", termMappings)); - ensureYellow(index); List<IndexRequestBuilder> indexBuilders = new ArrayList<>(); addTermsDocs("1", 1, indexBuilders);//low doc freq but high score
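The new ExtendedBoundsTests that follows exercises both shapes of ExtendedBounds seen earlier in this diff: a pre-parsed pair of longs and an unparsed pair of strings that may contain date math. A short sketch of the two constructors; the values and the "histo" aggregation are illustrative:

    // Illustrative values; the histogram tests above build bounds the same way.
    ExtendedBounds parsed = new ExtendedBounds(0L, 500L);              // already-parsed longs
    ExtendedBounds unparsed = new ExtendedBounds("2016-01-01", "now"); // parsed later against a format
    histogram("histo").field("value").interval(50).extendedBounds(parsed);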
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBoundsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBoundsTests.java new file mode 100644 index 00000000000..7a586284261 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/ExtendedBoundsTests.java @@ -0,0 +1,161 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.aggregations.bucket.histogram; + +import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.joda.FormatDateTimeFormatter; +import org.elasticsearch.common.joda.Joda; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.SearchParseException; +import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.test.ESTestCase; +import org.joda.time.DateTimeZone; +import org.joda.time.Instant; + +import java.io.IOException; + +import static java.lang.Math.max; +import static java.lang.Math.min; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class ExtendedBoundsTests extends ESTestCase { + /** + * Construct a random {@link ExtendedBounds}. + */ + public static ExtendedBounds randomExtendedBounds() { + ExtendedBounds bounds = randomParsedExtendedBounds(); + if (randomBoolean()) { + bounds = unparsed(bounds); + } + return bounds; + } + + /** + * Construct a random {@link ExtendedBounds} in pre-parsed form. + */ + public static ExtendedBounds randomParsedExtendedBounds() { + if (randomBoolean()) { + // Construct with one missing bound + if (randomBoolean()) { + return new ExtendedBounds(null, randomLong()); + } + return new ExtendedBounds(randomLong(), null); + } + long a = randomLong(); + long b; + do { + b = randomLong(); + } while (a == b); + long min = min(a, b); + long max = max(a, b); + return new ExtendedBounds(min, max); + } + + /** + * Convert an extended bounds in parsed form into one in unparsed form. + */ + public static ExtendedBounds unparsed(ExtendedBounds template) { + // It'd probably be better to randomize the formatter + FormatDateTimeFormatter formatter = Joda.forPattern("dateOptionalTime"); + String minAsStr = template.getMin() == null ? null : formatter.printer().print(new Instant(template.getMin())); + String maxAsStr = template.getMax() == null ?
null : formatter.printer().print(new Instant(template.getMax())); + return new ExtendedBounds(minAsStr, maxAsStr); + } + + public void testParseAndValidate() { + long now = randomLong(); + SearchContext context = mock(SearchContext.class); + when(context.nowInMillis()).thenReturn(now); + FormatDateTimeFormatter formatter = Joda.forPattern("dateOptionalTime"); + DocValueFormat format = new DocValueFormat.DateTime(formatter, DateTimeZone.UTC); + + ExtendedBounds expected = randomParsedExtendedBounds(); + ExtendedBounds parsed = unparsed(expected).parseAndValidate("test", context, format); + // parsed won't *equal* expected because equal includes the String parts + assertEquals(expected.getMin(), parsed.getMin()); + assertEquals(expected.getMax(), parsed.getMax()); + + parsed = new ExtendedBounds("now", null).parseAndValidate("test", context, format); + assertEquals(now, (long) parsed.getMin()); + assertNull(parsed.getMax()); + + parsed = new ExtendedBounds(null, "now").parseAndValidate("test", context, format); + assertNull(parsed.getMin()); + assertEquals(now, (long) parsed.getMax()); + + SearchParseException e = expectThrows(SearchParseException.class, + () -> new ExtendedBounds(100L, 90L).parseAndValidate("test", context, format)); + assertEquals("[extended_bounds.min][100] cannot be greater than [extended_bounds.max][90] for histogram aggregation [test]", + e.getMessage()); + + e = expectThrows(SearchParseException.class, + () -> unparsed(new ExtendedBounds(100L, 90L)).parseAndValidate("test", context, format)); + assertEquals("[extended_bounds.min][100] cannot be greater than [extended_bounds.max][90] for histogram aggregation [test]", + e.getMessage()); + } + + public void testTransportRoundTrip() throws IOException { + ExtendedBounds orig = randomExtendedBounds(); + + BytesReference origBytes; + try (BytesStreamOutput out = new BytesStreamOutput()) { + orig.writeTo(out); + origBytes = out.bytes(); + } + + ExtendedBounds read; + try (StreamInput in = origBytes.streamInput()) { + read = new ExtendedBounds(in); + assertEquals("read fully", 0, in.available()); + } + assertEquals(orig, read); + + BytesReference readBytes; + try (BytesStreamOutput out = new BytesStreamOutput()) { + read.writeTo(out); + readBytes = out.bytes(); + } + + assertEquals(origBytes, readBytes); + } + + public void testXContentRoundTrip() throws Exception { + ExtendedBounds orig = randomExtendedBounds(); + + try (XContentBuilder out = JsonXContent.contentBuilder()) { + orig.toXContent(out, ToXContent.EMPTY_PARAMS); + try (XContentParser in = JsonXContent.jsonXContent.createParser(out.bytes())) { + in.nextToken(); + ExtendedBounds read = ExtendedBounds.PARSER.apply(in, () -> ParseFieldMatcher.STRICT); + assertEquals(orig, read); + } catch (Exception e) { + throw new Exception("Error parsing [" + out.bytes().utf8ToString() + "]", e); + } + } + } +} diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java index 8dc015b30ed..e6c45aae1ab 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java @@ -58,21 +58,23 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.ArrayList; -import 
java.util.Collections; -import java.util.HashMap; import java.util.List; +import java.util.function.BiFunction; +import java.util.stream.Collectors; +import java.util.stream.IntStream; +import static java.util.Collections.emptyList; +import static java.util.Collections.emptyMap; +import static java.util.Collections.singletonList; import static org.elasticsearch.search.aggregations.AggregationBuilders.significantTerms; import static org.elasticsearch.test.VersionUtils.randomVersion; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.lessThanOrEqualTo; -/** - * - */ public class SignificanceHeuristicTests extends ESTestCase { static class SignificantTermsTestSearchContext extends TestSearchContext { @@ -94,26 +96,26 @@ public class SignificanceHeuristicTests extends ESTestCase { // test that stream output can actually be read - does not replace bwc test public void testStreamResponse() throws Exception { Version version = randomVersion(random()); - InternalSignificantTerms[] sigTerms = getRandomSignificantTerms(getRandomSignificanceheuristic()); + InternalMappedSignificantTerms<?, ?> sigTerms = getRandomSignificantTerms(getRandomSignificanceheuristic()); // write ByteArrayOutputStream outBuffer = new ByteArrayOutputStream(); OutputStreamStreamOutput out = new OutputStreamStreamOutput(outBuffer); out.setVersion(version); - sigTerms[0].writeTo(out); + out.writeNamedWriteable(sigTerms); // read ByteArrayInputStream inBuffer = new ByteArrayInputStream(outBuffer.toByteArray()); StreamInput in = new InputStreamStreamInput(inBuffer); NamedWriteableRegistry registry = new NamedWriteableRegistry(); - new SearchModule(Settings.EMPTY, registry); // populates the registry through side effects + new SearchModule(Settings.EMPTY, registry, false, emptyList()); // populates the registry through side effects in = new NamedWriteableAwareStreamInput(in, registry); in.setVersion(version); - sigTerms[1].readFrom(in); + InternalMappedSignificantTerms<?, ?> read = (InternalMappedSignificantTerms<?, ?>) in.readNamedWriteable(InternalAggregation.class); - assertTrue(sigTerms[1].significanceHeuristic.equals(sigTerms[0].significanceHeuristic)); - InternalSignificantTerms.Bucket originalBucket = (InternalSignificantTerms.Bucket) sigTerms[0].buckets.get(0); - InternalSignificantTerms.Bucket streamedBucket = (InternalSignificantTerms.Bucket) sigTerms[1].buckets.get(0); + assertEquals(sigTerms.significanceHeuristic, read.significanceHeuristic); + SignificantTerms.Bucket originalBucket = sigTerms.getBuckets().get(0); + SignificantTerms.Bucket streamedBucket = read.getBuckets().get(0); assertThat(originalBucket.getKeyAsString(), equalTo(streamedBucket.getKeyAsString())); assertThat(originalBucket.getSupersetDf(), equalTo(streamedBucket.getSupersetDf())); assertThat(originalBucket.getSubsetDf(), equalTo(streamedBucket.getSubsetDf())); @@ -121,22 +123,18 @@ public class SignificanceHeuristicTests extends ESTestCase { assertThat(streamedBucket.getSupersetSize(), equalTo(20L)); } - InternalSignificantTerms[] getRandomSignificantTerms(SignificanceHeuristic heuristic) { - InternalSignificantTerms[] sTerms = new InternalSignificantTerms[2]; - ArrayList<InternalSignificantTerms.Bucket> buckets = new ArrayList<>(); + InternalMappedSignificantTerms<?, ?> getRandomSignificantTerms(SignificanceHeuristic heuristic) { if (randomBoolean()) { - buckets.add(new
SignificantLongTerms.Bucket(1, 2, 3, 4, 123, InternalAggregations.EMPTY, null)); - sTerms[0] = new SignificantLongTerms(10, 20, "some_name", DocValueFormat.RAW, 1, 1, heuristic, buckets, - Collections.emptyList(), null); - sTerms[1] = new SignificantLongTerms(); + SignificantLongTerms.Bucket bucket = new SignificantLongTerms.Bucket(1, 2, 3, 4, 123, InternalAggregations.EMPTY, + DocValueFormat.RAW); + return new SignificantLongTerms("some_name", 1, 1, emptyList(), null, DocValueFormat.RAW, 10, 20, heuristic, + singletonList(bucket)); } else { - BytesRef term = new BytesRef("someterm"); - buckets.add(new SignificantStringTerms.Bucket(term, 1, 2, 3, 4, InternalAggregations.EMPTY, DocValueFormat.RAW)); - sTerms[0] = new SignificantStringTerms(10, 20, "some_name", DocValueFormat.RAW, 1, 1, heuristic, buckets, - Collections.emptyList(), null); - sTerms[1] = new SignificantStringTerms(); + SignificantStringTerms.Bucket bucket = new SignificantStringTerms.Bucket(new BytesRef("someterm"), 1, 2, 3, 4, + InternalAggregations.EMPTY, DocValueFormat.RAW); + return new SignificantStringTerms("some_name", 1, 1, emptyList(), null, DocValueFormat.RAW, 10, 20, heuristic, + singletonList(bucket)); } - return sTerms; } SignificanceHeuristic getRandomSignificanceheuristic() { @@ -164,37 +162,54 @@ public class SignificanceHeuristicTests extends ESTestCase { // Create aggregations as they might come from three different shards and return as list. private List<InternalAggregation> createInternalAggregations() { - - String type = randomBoolean() ? "long" : "string"; SignificanceHeuristic significanceHeuristic = getRandomSignificanceheuristic(); + TestAggFactory<?, ?> factory = randomBoolean() ? new StringTestAggFactory() : new LongTestAggFactory(); List<InternalAggregation> aggs = new ArrayList<>(); - List<InternalSignificantTerms.Bucket> terms0Buckets = new ArrayList<>(); - terms0Buckets.add(createBucket(type, 4, 4, 5, 10, 0)); - aggs.add(createAggregation(type, significanceHeuristic, terms0Buckets, 4, 10)); - List<InternalSignificantTerms.Bucket> terms1Buckets = new ArrayList<>(); - terms0Buckets.add(createBucket(type, 4, 4, 5, 10, 1)); - aggs.add(createAggregation(type, significanceHeuristic, terms1Buckets, 4, 10)); - List<InternalSignificantTerms.Bucket> terms01Buckets = new ArrayList<>(); - terms0Buckets.add(createBucket(type, 4, 8, 5, 10, 0)); - terms0Buckets.add(createBucket(type, 4, 8, 5, 10, 1)); - aggs.add(createAggregation(type, significanceHeuristic, terms01Buckets, 8, 10)); + aggs.add(factory.createAggregation(significanceHeuristic, 4, 10, 1, (f, i) -> f.createBucket(4, 4, 5, 10, 0))); + aggs.add(factory.createAggregation(significanceHeuristic, 4, 10, 1, (f, i) -> f.createBucket(4, 4, 5, 10, 1))); + aggs.add(factory.createAggregation(significanceHeuristic, 8, 10, 2, (f, i) -> f.createBucket(4, 4, 5, 10, i))); return aggs; }
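The TestAggFactory hierarchy introduced below replaces the old string/long type flag: each subclass fixes the concrete terms and bucket types once, and createAggregation builds the requested number of buckets through the BiFunction callback. A condensed sketch of driving one factory directly, reusing the sizes from the calls above:

    // Sketch: one shard-level aggregation with two string buckets.
    StringTestAggFactory factory = new StringTestAggFactory();
    SignificantStringTerms agg = factory.createAggregation(
            getRandomSignificanceheuristic(), 8, 10, 2, (f, i) -> f.createBucket(4, 4, 5, 10, i));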
- private InternalSignificantTerms createAggregation(String type, SignificanceHeuristic significanceHeuristic, List<InternalSignificantTerms.Bucket> buckets, long subsetSize, long supersetSize) { - if (type.equals("string")) { - return new SignificantStringTerms(subsetSize, supersetSize, "sig_terms", DocValueFormat.RAW, 2, -1, significanceHeuristic, buckets, new ArrayList(), new HashMap()); - } else { - return new SignificantLongTerms(subsetSize, supersetSize, "sig_terms", DocValueFormat.RAW, 2, -1, significanceHeuristic, buckets, new ArrayList(), new HashMap()); + private abstract class TestAggFactory<A extends InternalSignificantTerms<A, B>, B extends InternalSignificantTerms.Bucket<B>> { + final A createAggregation(SignificanceHeuristic significanceHeuristic, long subsetSize, long supersetSize, int bucketCount, + BiFunction<TestAggFactory<?, B>, Integer, B> bucketFactory) { + List<B> buckets = IntStream.range(0, bucketCount).mapToObj(i -> bucketFactory.apply(this, i)) + .collect(Collectors.toList()); + return createAggregation(significanceHeuristic, subsetSize, supersetSize, buckets); + } + + abstract A createAggregation(SignificanceHeuristic significanceHeuristic, long subsetSize, long supersetSize, List<B> buckets); + + abstract B createBucket(long subsetDF, long subsetSize, long supersetDF, long supersetSize, long label); + } + private class StringTestAggFactory extends TestAggFactory<SignificantStringTerms, SignificantStringTerms.Bucket> { + @Override + SignificantStringTerms createAggregation(SignificanceHeuristic significanceHeuristic, long subsetSize, long supersetSize, + List<SignificantStringTerms.Bucket> buckets) { + return new SignificantStringTerms("sig_terms", 2, -1, emptyList(), + emptyMap(), DocValueFormat.RAW, subsetSize, supersetSize, significanceHeuristic, buckets); + } + + @Override + SignificantStringTerms.Bucket createBucket(long subsetDF, long subsetSize, long supersetDF, long supersetSize, long label) { + return new SignificantStringTerms.Bucket(new BytesRef(Long.toString(label).getBytes(StandardCharsets.UTF_8)), subsetDF, + subsetSize, supersetDF, supersetSize, InternalAggregations.EMPTY, DocValueFormat.RAW); } } + private class LongTestAggFactory extends TestAggFactory<SignificantLongTerms, SignificantLongTerms.Bucket> { + @Override + SignificantLongTerms createAggregation(SignificanceHeuristic significanceHeuristic, long subsetSize, long supersetSize, + List<SignificantLongTerms.Bucket> buckets) { + return new SignificantLongTerms("sig_terms", 2, -1, new ArrayList(), emptyMap(), DocValueFormat.RAW, + subsetSize, supersetSize, significanceHeuristic, buckets); + } - private InternalSignificantTerms.Bucket createBucket(String type, long subsetDF, long subsetSize, long supersetDF, long supersetSize, long label) { - if (type.equals("string")) { - return new SignificantStringTerms.Bucket(new BytesRef(Long.toString(label).getBytes(StandardCharsets.UTF_8)), subsetDF, subsetSize, supersetDF, supersetSize, InternalAggregations.EMPTY, DocValueFormat.RAW); - } else { - return new SignificantLongTerms.Bucket(subsetDF, subsetSize, supersetDF, supersetSize, label, InternalAggregations.EMPTY, DocValueFormat.RAW); + @Override + SignificantLongTerms.Bucket createBucket(long subsetDF, long subsetSize, long supersetDF, long supersetSize, long label) { + return new SignificantLongTerms.Bucket(subsetDF, subsetSize, supersetDF, supersetSize, label, InternalAggregations.EMPTY, + DocValueFormat.RAW); } } @@ -202,7 +217,7 @@ public class SignificanceHeuristicTests extends ESTestCase { // 1. The output of the builders can actually be parsed // 2.
The parser does not swallow parameters after a significance heuristic was defined public void testBuilderAndParser() throws Exception { - SearchModule searchModule = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry()); + SearchModule searchModule = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false, emptyList()); ParseFieldRegistry<SignificanceHeuristicParser> heuristicParserMapper = searchModule.getSignificanceHeuristicParserRegistry(); SearchContext searchContext = new SignificantTermsTestSearchContext(); @@ -213,14 +228,22 @@ public class SignificanceHeuristicTests extends ESTestCase { // test mutual information with string boolean includeNegatives = randomBoolean(); boolean backgroundIsSuperset = randomBoolean(); - assertThat(parseFromString(heuristicParserMapper, searchContext, "\"mutual_information\":{\"include_negatives\": " + includeNegatives + ", \"background_is_superset\":" + backgroundIsSuperset + "}"), equalTo((SignificanceHeuristic) (new MutualInformation(includeNegatives, backgroundIsSuperset)))); - assertThat(parseFromString(heuristicParserMapper, searchContext, "\"chi_square\":{\"include_negatives\": " + includeNegatives + ", \"background_is_superset\":" + backgroundIsSuperset + "}"), equalTo((SignificanceHeuristic) (new ChiSquare(includeNegatives, backgroundIsSuperset)))); + String mutual = "\"mutual_information\":{\"include_negatives\": " + includeNegatives + ", \"background_is_superset\":" + + backgroundIsSuperset + "}"; + assertEquals(new MutualInformation(includeNegatives, backgroundIsSuperset), + parseFromString(heuristicParserMapper, searchContext, mutual)); + String chiSquare = "\"chi_square\":{\"include_negatives\": " + includeNegatives + ", \"background_is_superset\":" + + backgroundIsSuperset + "}"; + assertEquals(new ChiSquare(includeNegatives, backgroundIsSuperset), + parseFromString(heuristicParserMapper, searchContext, chiSquare)); // test with builders - assertTrue(parseFromBuilder(heuristicParserMapper, searchContext, new JLHScore()) instanceof JLHScore); - assertTrue(parseFromBuilder(heuristicParserMapper, searchContext, new GND(backgroundIsSuperset)) instanceof GND); - assertThat(parseFromBuilder(heuristicParserMapper, searchContext, new MutualInformation(includeNegatives, backgroundIsSuperset)), equalTo((SignificanceHeuristic) new MutualInformation(includeNegatives, backgroundIsSuperset))); - assertThat(parseFromBuilder(heuristicParserMapper, searchContext, new ChiSquare(includeNegatives, backgroundIsSuperset)), equalTo((SignificanceHeuristic) new ChiSquare(includeNegatives, backgroundIsSuperset))); + assertThat(parseFromBuilder(heuristicParserMapper, searchContext, new JLHScore()), instanceOf(JLHScore.class)); + assertThat(parseFromBuilder(heuristicParserMapper, searchContext, new GND(backgroundIsSuperset)), instanceOf(GND.class)); + assertEquals(new MutualInformation(includeNegatives, backgroundIsSuperset), + parseFromBuilder(heuristicParserMapper, searchContext, new MutualInformation(includeNegatives, backgroundIsSuperset))); + assertEquals(new ChiSquare(includeNegatives, backgroundIsSuperset), + parseFromBuilder(heuristicParserMapper, searchContext, new ChiSquare(includeNegatives, backgroundIsSuperset))); // test exceptions String faultyHeuristicdefinition = "\"mutual_information\":{\"include_negatives\": false, \"some_unknown_field\": false}";
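The assertions above all rely on the same round trip: a heuristic is rendered to JSON, by hand or through its builder, and the parser registered for that name must reconstruct an equal instance. A minimal sketch using the test's own parseFromString helper, which is defined near the end of this file; "jlh" is assumed here to be the registered name of JLHScore:

    // JLHScore takes no parameters, so an empty object is sufficient.
    SignificanceHeuristic parsed = parseFromString(heuristicParserMapper, searchContext, "\"jlh\":{}");
    assertThat(parsed, instanceOf(JLHScore.class));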
@@ -245,7 +268,8 @@ public class SignificanceHeuristicTests extends ESTestCase { IndicesQueriesRegistry registry = new IndicesQueriesRegistry(); try { - XContentParser stParser = JsonXContent.jsonXContent.createParser("{\"field\":\"text\", " + faultyHeuristicDefinition + ",\"min_doc_count\":200}"); + XContentParser stParser = JsonXContent.jsonXContent.createParser( + "{\"field\":\"text\", " + faultyHeuristicDefinition + ",\"min_doc_count\":200}"); QueryParseContext parseContext = new QueryParseContext(registry, stParser, ParseFieldMatcher.STRICT); stParser.nextToken(); new SignificantTermsParser(significanceHeuristicParserRegistry, registry).parse("testagg", parseContext); @@ -282,7 +306,8 @@ public class SignificanceHeuristicTests extends ESTestCase { protected SignificanceHeuristic parseFromString(ParseFieldRegistry<SignificanceHeuristicParser> significanceHeuristicParserRegistry, SearchContext searchContext, String heuristicString) throws IOException { - XContentParser stParser = JsonXContent.jsonXContent.createParser("{\"field\":\"text\", " + heuristicString + ", \"min_doc_count\":200}"); + XContentParser stParser = JsonXContent.jsonXContent.createParser( + "{\"field\":\"text\", " + heuristicString + ", \"min_doc_count\":200}"); return parseSignificanceHeuristic(significanceHeuristicParserRegistry, searchContext, stParser); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java index 49ef9e1b6b5..5cc6ec58630 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java @@ -181,7 +181,7 @@ public abstract class AbstractGeoTestCase extends ESIntegTestCase { // Added to debug a test failure where the terms aggregation seems to be reporting two documents with the same value for NUMBER_FIELD_NAME. This will check that after // random indexing each document only has 1 value for NUMBER_FIELD_NAME and it is the correct value. Following this initial change it seems that this call was getting // more than 2000 hits (actual value was 2059) so now it will also check to ensure all hits have the correct index and type - SearchResponse response = client().prepareSearch(HIGH_CARD_IDX_NAME).addField(NUMBER_FIELD_NAME).addSort(SortBuilders.fieldSort(NUMBER_FIELD_NAME) + SearchResponse response = client().prepareSearch(HIGH_CARD_IDX_NAME).addStoredField(NUMBER_FIELD_NAME).addSort(SortBuilders.fieldSort(NUMBER_FIELD_NAME) .order(SortOrder.ASC)).setSize(5000).get(); assertSearchResponse(response); long totalHits = response.getHits().totalHits();
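The addField call above had to change because 5.0 separates stored-field loading from source extraction, so stored fields are now requested explicitly. A sketch of the renamed builder method; the index and field names are illustrative:

    SearchResponse response = client().prepareSearch("idx")
            .addStoredField("number")   // formerly addField("number")
            .setSize(100)
            .get();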
diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/CardinalityTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityIT.java similarity index 79% rename from modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/CardinalityTests.java rename to core/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityIT.java index 97447cf2df3..594eba7ddb5 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/CardinalityTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityIT.java @@ -17,14 +17,15 @@ * under the License. */ -package org.elasticsearch.messy.tests; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; -import org.elasticsearch.script.groovy.GroovyPlugin; import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.elasticsearch.search.aggregations.bucket.global.Global; import org.elasticsearch.search.aggregations.bucket.terms.Terms; @@ -33,9 +34,14 @@ import org.elasticsearch.test.ESIntegTestCase; import java.util.Collection; import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.function.Function; +import static java.util.Collections.emptyMap; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; +import static org.elasticsearch.script.ScriptService.ScriptType; import static org.elasticsearch.search.aggregations.AggregationBuilders.cardinality; import static org.elasticsearch.search.aggregations.AggregationBuilders.global; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; @@ -45,11 +51,44 @@ import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.notNullValue; @ESIntegTestCase.SuiteScopeTestCase -public class CardinalityTests extends ESIntegTestCase { +public class CardinalityIT extends ESIntegTestCase { @Override protected Collection<Class<? extends Plugin>> nodePlugins() { - return Collections.singleton(GroovyPlugin.class); + return Collections.singleton(CustomScriptPlugin.class); + } + + public static class CustomScriptPlugin extends MockScriptPlugin { + + @Override + protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() { + Map<String, Function<Map<String, Object>, Object>> scripts = new HashMap<>(); + + scripts.put("_value", vars -> vars.get("_value")); + + scripts.put("doc['str_value'].value", vars -> { + Map<?, ?> doc = (Map) vars.get("doc"); + return doc.get("str_value"); + }); + + scripts.put("doc['str_values'].values", vars -> { + Map<?, ?> doc = (Map) vars.get("doc"); + ScriptDocValues.Strings strValue = (ScriptDocValues.Strings) doc.get("str_values"); + return strValue.getValues(); + }); + + scripts.put("doc[' + singleNumericField() + '].value", vars -> { + Map<?, ?> doc = (Map) vars.get("doc"); + return doc.get(singleNumericField()); + }); + + scripts.put("doc[' + multiNumericField(false) + '].values", vars -> { + Map<?, ?> doc = (Map) vars.get("doc"); + return ((ScriptDocValues) doc.get(multiNumericField(false))).getValues(); + }); + + return scripts; + } } @Override @@ -123,11 +162,11 @@ public class CardinalityTests extends ESIntegTestCase { assertThat(count.getValue(), greaterThan(0L)); } } - private String singleNumericField() { + private static String singleNumericField() { return randomBoolean() ? "l_value" : "d_value"; } - private String multiNumericField(boolean hash) { + private static String multiNumericField(boolean hash) { return randomBoolean() ?
"l_values" : "d_values"; } @@ -264,7 +303,9 @@ public class CardinalityTests extends ESIntegTestCase { public void testSingleValuedStringScript() throws Exception { SearchResponse response = client().prepareSearch("idx").setTypes("type") .addAggregation( - cardinality("cardinality").precisionThreshold(precisionThreshold).script(new Script("doc['str_value'].value"))) + cardinality("cardinality") + .precisionThreshold(precisionThreshold) + .script(new Script("doc['str_value'].value", ScriptType.INLINE, CustomScriptPlugin.NAME, emptyMap()))) .execute().actionGet(); assertSearchResponse(response); @@ -278,7 +319,9 @@ public class CardinalityTests extends ESIntegTestCase { public void testMultiValuedStringScript() throws Exception { SearchResponse response = client().prepareSearch("idx").setTypes("type") .addAggregation( - cardinality("cardinality").precisionThreshold(precisionThreshold).script(new Script("doc['str_values'].values"))) + cardinality("cardinality") + .precisionThreshold(precisionThreshold) + .script(new Script("doc['str_values'].values", ScriptType.INLINE, CustomScriptPlugin.NAME, emptyMap()))) .execute().actionGet(); assertSearchResponse(response); @@ -290,10 +333,9 @@ public class CardinalityTests extends ESIntegTestCase { } public void testSingleValuedNumericScript() throws Exception { + Script script = new Script("doc[' + singleNumericField() + '].value", ScriptType.INLINE, CustomScriptPlugin.NAME, emptyMap()); SearchResponse response = client().prepareSearch("idx").setTypes("type") - .addAggregation( - cardinality("cardinality").precisionThreshold(precisionThreshold).script( - new Script("doc['" + singleNumericField() + "'].value"))) + .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).script(script)) .execute().actionGet(); assertSearchResponse(response); @@ -305,10 +347,9 @@ public class CardinalityTests extends ESIntegTestCase { } public void testMultiValuedNumericScript() throws Exception { + Script script = new Script("doc[' + multiNumericField(false) + '].values", ScriptType.INLINE, CustomScriptPlugin.NAME, null); SearchResponse response = client().prepareSearch("idx").setTypes("type") - .addAggregation( - cardinality("cardinality").precisionThreshold(precisionThreshold).script( - new Script("doc['" + multiNumericField(false) + "'].values"))) + .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).script(script)) .execute().actionGet(); assertSearchResponse(response); @@ -322,7 +363,10 @@ public class CardinalityTests extends ESIntegTestCase { public void testSingleValuedStringValueScript() throws Exception { SearchResponse response = client().prepareSearch("idx").setTypes("type") .addAggregation( - cardinality("cardinality").precisionThreshold(precisionThreshold).field("str_value").script(new Script("_value"))) + cardinality("cardinality") + .precisionThreshold(precisionThreshold) + .field("str_value") + .script(new Script("_value", ScriptType.INLINE, CustomScriptPlugin.NAME, emptyMap()))) .execute().actionGet(); assertSearchResponse(response); @@ -336,7 +380,10 @@ public class CardinalityTests extends ESIntegTestCase { public void testMultiValuedStringValueScript() throws Exception { SearchResponse response = client().prepareSearch("idx").setTypes("type") .addAggregation( - cardinality("cardinality").precisionThreshold(precisionThreshold).field("str_values").script(new Script("_value"))) + cardinality("cardinality") + .precisionThreshold(precisionThreshold) + .field("str_values") + .script(new 
Script("_value", ScriptType.INLINE, CustomScriptPlugin.NAME, emptyMap()))) .execute().actionGet(); assertSearchResponse(response); @@ -350,8 +397,10 @@ public class CardinalityTests extends ESIntegTestCase { public void testSingleValuedNumericValueScript() throws Exception { SearchResponse response = client().prepareSearch("idx").setTypes("type") .addAggregation( - cardinality("cardinality").precisionThreshold(precisionThreshold).field(singleNumericField()) - .script(new Script("_value"))) + cardinality("cardinality") + .precisionThreshold(precisionThreshold) + .field(singleNumericField()) + .script(new Script("_value", ScriptType.INLINE, CustomScriptPlugin.NAME, emptyMap()))) .execute().actionGet(); assertSearchResponse(response); @@ -365,8 +414,10 @@ public class CardinalityTests extends ESIntegTestCase { public void testMultiValuedNumericValueScript() throws Exception { SearchResponse response = client().prepareSearch("idx").setTypes("type") .addAggregation( - cardinality("cardinality").precisionThreshold(precisionThreshold).field(multiNumericField(false)) - .script(new Script("_value"))) + cardinality("cardinality") + .precisionThreshold(precisionThreshold) + .field(multiNumericField(false)) + .script(new Script("_value", ScriptType.INLINE, CustomScriptPlugin.NAME, emptyMap()))) .execute().actionGet(); assertSearchResponse(response); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsIT.java index d97bc824602..3afba951b4a 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsIT.java @@ -44,9 +44,6 @@ import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.sameInstance; -/** - * - */ @ESIntegTestCase.SuiteScopeTestCase public class GeoBoundsIT extends AbstractGeoTestCase { private static final String aggName = "geoBounds"; diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HDRPercentileRanksTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java similarity index 83% rename from modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HDRPercentileRanksTests.java rename to core/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java index 21fb5be6b9d..57bdc7d5dfc 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HDRPercentileRanksTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java @@ -16,20 +16,19 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.messy.tests; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; -import org.elasticsearch.script.groovy.GroovyPlugin; +import org.elasticsearch.search.aggregations.AggregationTestScriptsPlugin; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.global.Global; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Order; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.metrics.AbstractNumericTestCase; import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile; import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanks; import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesMethod; @@ -41,6 +40,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import static java.util.Collections.emptyMap; import static org.elasticsearch.common.util.CollectionUtils.iterableAsArrayList; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; @@ -56,13 +56,11 @@ import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.sameInstance; -/** - * - */ -public class HDRPercentileRanksTests extends AbstractNumericTestCase { +public class HDRPercentileRanksIT extends AbstractNumericTestCase { + @Override protected Collection> nodePlugins() { - return Collections.singleton(GroovyPlugin.class); + return Collections.singleton(AggregationTestScriptsPlugin.class); } private static double[] randomPercents(long minValue, long maxValue) { @@ -82,7 +80,7 @@ public class HDRPercentileRanksTests extends AbstractNumericTestCase { } } Arrays.sort(percents); - Loggers.getLogger(HDRPercentileRanksTests.class).info("Using values={}", Arrays.toString(percents)); + Loggers.getLogger(HDRPercentileRanksIT.class).info("Using values={}", Arrays.toString(percents)); return percents; } @@ -208,7 +206,7 @@ public class HDRPercentileRanksTests extends AbstractNumericTestCase { PercentileRanks values = global.getAggregations().get("percentile_ranks"); assertThat(values, notNullValue()); assertThat(values.getName(), equalTo("percentile_ranks")); - assertThat((PercentileRanks) global.getProperty("percentile_ranks"), sameInstance(values)); + assertThat(global.getProperty("percentile_ranks"), sameInstance(values)); } @@ -255,8 +253,12 @@ public class HDRPercentileRanksTests extends AbstractNumericTestCase { .prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( - percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) - .field("value").script(new Script("_value - 1")).values(pcts)) + percentileRanks("percentile_ranks") + .method(PercentilesMethod.HDR) + .numberOfSignificantValueDigits(sigDigits) + .field("value") + .script(new Script("_value - 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap())) + .values(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -275,8 +277,12 @@ public class HDRPercentileRanksTests 
extends AbstractNumericTestCase { .prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( - percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) - .field("value").script(new Script("_value - dec", ScriptType.INLINE, null, params)).values(pcts)) + percentileRanks("percentile_ranks") + .method(PercentilesMethod.HDR) + .numberOfSignificantValueDigits(sigDigits) + .field("value") + .script(new Script("_value - dec", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params)) + .values(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -311,8 +317,12 @@ public class HDRPercentileRanksTests extends AbstractNumericTestCase { .prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( - percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) - .field("values").script(new Script("_value - 1")).values(pcts)) + percentileRanks("percentile_ranks") + .method(PercentilesMethod.HDR) + .numberOfSignificantValueDigits(sigDigits) + .field("values") + .script(new Script("_value - 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap())) + .values(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -328,8 +338,12 @@ public class HDRPercentileRanksTests extends AbstractNumericTestCase { .prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( - percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) - .field("values").script(new Script("20 - _value")).values(pcts)) + percentileRanks("percentile_ranks") + .method(PercentilesMethod.HDR) + .numberOfSignificantValueDigits(sigDigits) + .field("values") + .script(new Script("20 - _value", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap())) + .values(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -348,8 +362,12 @@ public class HDRPercentileRanksTests extends AbstractNumericTestCase { .prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( - percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) - .field("values").script(new Script("_value - dec", ScriptType.INLINE, null, params)).values(pcts)) + percentileRanks("percentile_ranks") + .method(PercentilesMethod.HDR) + .numberOfSignificantValueDigits(sigDigits) + .field("values") + .script(new Script("_value - dec", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params)) + .values(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -366,8 +384,11 @@ public class HDRPercentileRanksTests extends AbstractNumericTestCase { .prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( - percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) - .script(new Script("doc['value'].value")).values(pcts)) + percentileRanks("percentile_ranks") + .method(PercentilesMethod.HDR) + .numberOfSignificantValueDigits(sigDigits) + .script(new Script("doc['value'].value", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap())) + .values(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -381,13 +402,19 @@ public class HDRPercentileRanksTests extends AbstractNumericTestCase { int sigDigits = randomSignificantDigits(); Map params = new HashMap<>(); params.put("dec", 1); + + Script script = new Script("doc['value'].value - dec", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params); + final 
double[] pcts = randomPercents(minValue - 1, maxValue - 1); SearchResponse searchResponse = client() .prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( - percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) - .script(new Script("doc['value'].value - dec", ScriptType.INLINE, null, params)).values(pcts)) + percentileRanks("percentile_ranks") + .method(PercentilesMethod.HDR) + .numberOfSignificantValueDigits(sigDigits) + .script(script) + .values(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -400,12 +427,18 @@ public class HDRPercentileRanksTests extends AbstractNumericTestCase { public void testScriptMultiValued() throws Exception { int sigDigits = randomSignificantDigits(); final double[] pcts = randomPercents(minValues, maxValues); + + Script script = new Script("doc['values'].values", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()); + SearchResponse searchResponse = client() .prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( - percentileRanks("percentile_ranks").method(PercentilesMethod.HDR).numberOfSignificantValueDigits(sigDigits) - .script(new Script("doc['values'].values")).values(pcts)) + percentileRanks("percentile_ranks") + .method(PercentilesMethod.HDR) + .numberOfSignificantValueDigits(sigDigits) + .script(script) + .values(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -417,8 +450,8 @@ public class HDRPercentileRanksTests extends AbstractNumericTestCase { @Override public void testScriptMultiValuedWithParams() throws Exception { int sigDigits = randomSignificantDigits(); - Map params = new HashMap<>(); - params.put("dec", 1); + Script script = AggregationTestScriptsPlugin.DECREMENT_ALL_VALUES; + final double[] pcts = randomPercents(minValues - 1, maxValues - 1); SearchResponse searchResponse = client() .prepareSearch("idx") @@ -427,10 +460,8 @@ public class HDRPercentileRanksTests extends AbstractNumericTestCase { percentileRanks("percentile_ranks") .method(PercentilesMethod.HDR) .numberOfSignificantValueDigits(sigDigits) - .script(new Script( - "List values = doc['values'].values; double[] res = new double[values.size()]; for (int i = 0; i < res.length; i++) { res[i] = values.get(i) - dec; }; return res;", - ScriptType.INLINE, null, params)) - .values(pcts)) + .script(script) + .values(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HDRPercentilesTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java similarity index 82% rename from modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HDRPercentilesTests.java rename to core/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java index 9c21928798a..8112551f53c 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HDRPercentilesTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.messy.tests; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.logging.Loggers; @@ -24,13 +24,12 @@ import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; -import org.elasticsearch.script.groovy.GroovyPlugin; +import org.elasticsearch.search.aggregations.AggregationTestScriptsPlugin; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.global.Global; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Order; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.metrics.AbstractNumericTestCase; import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile; import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles; import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesMethod; @@ -42,6 +41,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import static java.util.Collections.emptyMap; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.search.aggregations.AggregationBuilders.filter; @@ -57,13 +57,11 @@ import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.sameInstance; -/** - * - */ -public class HDRPercentilesTests extends AbstractNumericTestCase { +public class HDRPercentilesIT extends AbstractNumericTestCase { + @Override protected Collection> nodePlugins() { - return Collections.singleton(GroovyPlugin.class); + return Collections.singleton(AggregationTestScriptsPlugin.class); } private static double[] randomPercentiles() { @@ -83,7 +81,7 @@ public class HDRPercentilesTests extends AbstractNumericTestCase { } } Arrays.sort(percentiles); - Loggers.getLogger(HDRPercentilesTests.class).info("Using percentiles={}", Arrays.toString(percentiles)); + Loggers.getLogger(HDRPercentilesIT.class).info("Using percentiles={}", Arrays.toString(percentiles)); return percentiles; } @@ -130,8 +128,8 @@ public class HDRPercentilesTests extends AbstractNumericTestCase { percentiles("percentiles").field("value") .numberOfSignificantValueDigits(sigDigits) .method(PercentilesMethod.HDR) - .percentiles(10, - 15))).execute().actionGet(); + .percentiles(10, 15))) + .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L)); Histogram histo = searchResponse.getAggregations().get("histo"); @@ -210,7 +208,7 @@ public class HDRPercentilesTests extends AbstractNumericTestCase { Percentiles percentiles = global.getAggregations().get("percentiles"); assertThat(percentiles, notNullValue()); assertThat(percentiles.getName(), equalTo("percentiles")); - assertThat((Percentiles) global.getProperty("percentiles"), sameInstance(percentiles)); + assertThat(global.getProperty("percentiles"), sameInstance(percentiles)); } @@ -240,8 +238,13 @@ public class HDRPercentilesTests extends AbstractNumericTestCase { .prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( - 
percentiles("percentiles").numberOfSignificantValueDigits(sigDigits).method(PercentilesMethod.HDR).field("value") - .script(new Script("_value - 1")).percentiles(pcts)).execute().actionGet(); + percentiles("percentiles") + .numberOfSignificantValueDigits(sigDigits) + .method(PercentilesMethod.HDR) + .field("value") + .script(new Script("_value - 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap())) + .percentiles(pcts)) + .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -253,15 +256,20 @@ public class HDRPercentilesTests extends AbstractNumericTestCase { public void testSingleValuedFieldWithValueScriptWithParams() throws Exception { Map params = new HashMap<>(); params.put("dec", 1); + final double[] pcts = randomPercentiles(); int sigDigits = randomSignificantDigits(); SearchResponse searchResponse = client() .prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( - percentiles("percentiles").numberOfSignificantValueDigits(sigDigits).method(PercentilesMethod.HDR).field("value") - .script(new Script("_value - dec", ScriptType.INLINE, null, params)).percentiles(pcts)).execute() - .actionGet(); + percentiles("percentiles") + .numberOfSignificantValueDigits(sigDigits) + .method(PercentilesMethod.HDR) + .field("value") + .script(new Script("_value - dec", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params)) + .percentiles(pcts)) + .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -295,8 +303,13 @@ public class HDRPercentilesTests extends AbstractNumericTestCase { .prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( - percentiles("percentiles").numberOfSignificantValueDigits(sigDigits).method(PercentilesMethod.HDR).field("values") - .script(new Script("_value - 1")).percentiles(pcts)).execute().actionGet(); + percentiles("percentiles") + .numberOfSignificantValueDigits(sigDigits) + .method(PercentilesMethod.HDR) + .field("values") + .script(new Script("_value - 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap())) + .percentiles(pcts)) + .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -311,8 +324,13 @@ public class HDRPercentilesTests extends AbstractNumericTestCase { .prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( - percentiles("percentiles").numberOfSignificantValueDigits(sigDigits).method(PercentilesMethod.HDR).field("values") - .script(new Script("20 - _value")).percentiles(pcts)).execute().actionGet(); + percentiles("percentiles") + .numberOfSignificantValueDigits(sigDigits) + .method(PercentilesMethod.HDR) + .field("values") + .script(new Script("20 - _value", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap())) + .percentiles(pcts)) + .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -324,15 +342,20 @@ public class HDRPercentilesTests extends AbstractNumericTestCase { public void testMultiValuedFieldWithValueScriptWithParams() throws Exception { Map params = new HashMap<>(); params.put("dec", 1); + final double[] pcts = randomPercentiles(); int sigDigits = randomSignificantDigits(); SearchResponse searchResponse = client() .prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( - percentiles("percentiles").numberOfSignificantValueDigits(sigDigits).method(PercentilesMethod.HDR).field("values") - .script(new Script("_value - dec", ScriptType.INLINE, null, params)).percentiles(pcts)).execute() - .actionGet(); + percentiles("percentiles") + .numberOfSignificantValueDigits(sigDigits) + .method(PercentilesMethod.HDR) + 
.field("values") + .script(new Script("_value - dec", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params)) + .percentiles(pcts)) + .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -348,8 +371,12 @@ public class HDRPercentilesTests extends AbstractNumericTestCase { .prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( - percentiles("percentiles").numberOfSignificantValueDigits(sigDigits).method(PercentilesMethod.HDR) - .script(new Script("doc['value'].value")).percentiles(pcts)).execute().actionGet(); + percentiles("percentiles") + .numberOfSignificantValueDigits(sigDigits) + .method(PercentilesMethod.HDR) + .script(new Script("doc['value'].value", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap())) + .percentiles(pcts)) + .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -361,14 +388,20 @@ public class HDRPercentilesTests extends AbstractNumericTestCase { public void testScriptSingleValuedWithParams() throws Exception { Map params = new HashMap<>(); params.put("dec", 1); + + Script script = new Script("doc['value'].value - dec", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params); + final double[] pcts = randomPercentiles(); int sigDigits = randomSignificantDigits(); SearchResponse searchResponse = client() .prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( - percentiles("percentiles").numberOfSignificantValueDigits(sigDigits).method(PercentilesMethod.HDR) - .script(new Script("doc['value'].value - dec", ScriptType.INLINE, null, params)).percentiles(pcts)) + percentiles("percentiles") + .numberOfSignificantValueDigits(sigDigits) + .method(PercentilesMethod.HDR) + .script(script) + .percentiles(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -381,12 +414,19 @@ public class HDRPercentilesTests extends AbstractNumericTestCase { public void testScriptMultiValued() throws Exception { final double[] pcts = randomPercentiles(); int sigDigits = randomSignificantDigits(); + + Script script = new Script("doc['values'].values", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()); + SearchResponse searchResponse = client() .prepareSearch("idx") .setQuery(matchAllQuery()) .addAggregation( - percentiles("percentiles").numberOfSignificantValueDigits(sigDigits).method(PercentilesMethod.HDR) - .script(new Script("doc['values'].values")).percentiles(pcts)).execute().actionGet(); + percentiles("percentiles") + .numberOfSignificantValueDigits(sigDigits) + .method(PercentilesMethod.HDR) + .script(script) + .percentiles(pcts)) + .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -396,8 +436,8 @@ public class HDRPercentilesTests extends AbstractNumericTestCase { @Override public void testScriptMultiValuedWithParams() throws Exception { - Map params = new HashMap<>(); - params.put("dec", 1); + Script script = AggregationTestScriptsPlugin.DECREMENT_ALL_VALUES; + final double[] pcts = randomPercentiles(); int sigDigits = randomSignificantDigits(); SearchResponse searchResponse = client() @@ -407,9 +447,9 @@ public class HDRPercentilesTests extends AbstractNumericTestCase { percentiles("percentiles") .numberOfSignificantValueDigits(sigDigits) .method(PercentilesMethod.HDR) - .script(new Script( - "List values = doc['values'].values; double[] res = new double[values.size()]; for (int i = 0; i < res.length; i++) { res[i] = values.get(i) - dec; }; return res;", - ScriptType.INLINE, null, params)).percentiles(pcts)).execute().actionGet(); + .script(script) + .percentiles(pcts)) + 
.execute().actionGet(); assertHitCount(searchResponse, 10); diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentileRanksTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java similarity index 83% rename from modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentileRanksTests.java rename to core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java index 7e76b3f03eb..e50b89d8b96 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentileRanksTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java @@ -16,21 +16,22 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.messy.tests; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; -import org.elasticsearch.script.groovy.GroovyPlugin; +import org.elasticsearch.search.aggregations.AggregationTestScriptsPlugin; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.global.Global; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Order; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.metrics.AbstractNumericTestCase; import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile; import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanks; import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregationBuilder; @@ -42,7 +43,9 @@ import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.function.Function; +import static java.util.Collections.emptyMap; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.search.aggregations.AggregationBuilders.filter; @@ -57,13 +60,11 @@ import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.sameInstance; -/** - * - */ -public class TDigestPercentileRanksTests extends AbstractNumericTestCase { +public class TDigestPercentileRanksIT extends AbstractNumericTestCase { + @Override protected Collection> nodePlugins() { - return Collections.singleton(GroovyPlugin.class); + return Collections.singleton(AggregationTestScriptsPlugin.class); } private static double[] randomPercents(long minValue, long maxValue) { @@ -84,7 +85,7 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase { } } Arrays.sort(percents); - Loggers.getLogger(TDigestPercentileRanksTests.class).info("Using values={}", Arrays.toString(percents)); + Loggers.getLogger(TDigestPercentileRanksIT.class).info("Using values={}", Arrays.toString(percents)); return percents; } @@ -198,8 +199,7 @@ public 
class TDigestPercentileRanksTests extends AbstractNumericTestCase { PercentileRanks values = global.getAggregations().get("percentile_ranks"); assertThat(values, notNullValue()); assertThat(values.getName(), equalTo("percentile_ranks")); - assertThat((PercentileRanks) global.getProperty("percentile_ranks"), sameInstance(values)); - + assertThat(global.getProperty("percentile_ranks"), sameInstance(values)); } public void testSingleValuedFieldOutsideRange() throws Exception { @@ -238,9 +238,12 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase { final double[] pcts = randomPercents(minValue - 1, maxValue - 1); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(randomCompression(percentileRanks("percentile_ranks")) - .field("value").script(new Script("_value - 1")) - .values(pcts)) + .addAggregation( + randomCompression( + percentileRanks("percentile_ranks")) + .field("value") + .script(new Script("_value - 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap())) + .values(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -256,10 +259,12 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase { final double[] pcts = randomPercents(minValue - 1, maxValue - 1); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(randomCompression(percentileRanks("percentile_ranks")) - .field("value") - .script(new Script("_value - dec", ScriptType.INLINE, null, params)) - .values(pcts)) + .addAggregation( + randomCompression( + percentileRanks("percentile_ranks")) + .field("value") + .script(new Script("_value - dec", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params)) + .values(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -289,9 +294,12 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase { final double[] pcts = randomPercents(minValues - 1, maxValues - 1); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(randomCompression(percentileRanks("percentile_ranks")) - .field("values").script(new Script("_value - 1")) - .values(pcts)) + .addAggregation( + randomCompression( + percentileRanks("percentile_ranks")) + .field("values") + .script(new Script("_value - 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap())) + .values(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -304,9 +312,12 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase { final double[] pcts = randomPercents(-maxValues, -minValues); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(randomCompression(percentileRanks("percentile_ranks")) - .field("values").script(new Script("_value * -1")) - .values(pcts)) + .addAggregation( + randomCompression( + percentileRanks("percentile_ranks")) + .field("values") + .script(new Script("_value * -1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap())) + .values(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -322,10 +333,12 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase { final double[] pcts = randomPercents(minValues - 1, maxValues - 1); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(randomCompression(percentileRanks("percentile_ranks")) - .field("values") - .script(new 
Script("_value - dec", ScriptType.INLINE, null, params)) - .values(pcts)) + .addAggregation( + randomCompression( + percentileRanks("percentile_ranks")) + .field("values") + .script(new Script("_value - dec", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params)) + .values(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -339,9 +352,11 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase { final double[] pcts = randomPercents(minValue, maxValue); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(randomCompression(percentileRanks("percentile_ranks")) - .script(new Script("doc['value'].value")) - .values(pcts)) + .addAggregation( + randomCompression( + percentileRanks("percentile_ranks")) + .script(new Script("doc['value'].value", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap())) + .values(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -354,13 +369,17 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase { public void testScriptSingleValuedWithParams() throws Exception { Map params = new HashMap<>(); params.put("dec", 1); + + Script script = new Script("doc['value'].value - dec", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params); + final double[] pcts = randomPercents(minValue - 1, maxValue - 1); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(randomCompression(percentileRanks("percentile_ranks")) - .script( - new Script("doc['value'].value - dec", ScriptType.INLINE, null, params)) - .values(pcts)) + .addAggregation( + randomCompression( + percentileRanks("percentile_ranks")) + .script(script) + .values(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -372,11 +391,14 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase { @Override public void testScriptMultiValued() throws Exception { final double[] pcts = randomPercents(minValues, maxValues); + Script script = new Script("doc['values'].values", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(randomCompression(percentileRanks("percentile_ranks")) - .script(new Script("doc['values'].values")) - .values(pcts)) + .addAggregation( + randomCompression( + percentileRanks("percentile_ranks")) + .script(script) + .values(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -387,16 +409,16 @@ public class TDigestPercentileRanksTests extends AbstractNumericTestCase { @Override public void testScriptMultiValuedWithParams() throws Exception { - Map params = new HashMap<>(); - params.put("dec", 1); + Script script = AggregationTestScriptsPlugin.DECREMENT_ALL_VALUES; + final double[] pcts = randomPercents(minValues - 1, maxValues - 1); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(randomCompression(percentileRanks("percentile_ranks")) - .script(new Script( - "List values = doc['values'].values; double[] res = new double[values.size()]; for (int i = 0; i < res.length; i++) { res[i] = values.get(i) - dec; }; return res;", - ScriptType.INLINE, null, params)) - .values(pcts)) + .addAggregation( + randomCompression( + percentileRanks("percentile_ranks")) + .script(script) + .values(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); diff --git 
a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentilesTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java similarity index 81% rename from modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentilesTests.java rename to core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java index 712c9ebd951..b0268351954 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentilesTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.messy.tests; +package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.logging.Loggers; @@ -24,13 +24,12 @@ import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; -import org.elasticsearch.script.groovy.GroovyPlugin; +import org.elasticsearch.search.aggregations.AggregationTestScriptsPlugin; import org.elasticsearch.search.aggregations.bucket.filter.Filter; import org.elasticsearch.search.aggregations.bucket.global.Global; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Order; import org.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.elasticsearch.search.aggregations.metrics.AbstractNumericTestCase; import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile; import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles; import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregationBuilder; @@ -43,6 +42,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import static java.util.Collections.emptyMap; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.search.aggregations.AggregationBuilders.filter; @@ -57,13 +57,11 @@ import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.sameInstance; -/** - * - */ -public class TDigestPercentilesTests extends AbstractNumericTestCase { +public class TDigestPercentilesIT extends AbstractNumericTestCase { + @Override protected Collection> nodePlugins() { - return Collections.singleton(GroovyPlugin.class); + return Collections.singleton(AggregationTestScriptsPlugin.class); } private static double[] randomPercentiles() { @@ -83,7 +81,7 @@ public class TDigestPercentilesTests extends AbstractNumericTestCase { } } Arrays.sort(percentiles); - Loggers.getLogger(TDigestPercentilesTests.class).info("Using percentiles={}", Arrays.toString(percentiles)); + Loggers.getLogger(TDigestPercentilesIT.class).info("Using percentiles={}", Arrays.toString(percentiles)); return percentiles; } @@ -197,8 +195,7 @@ public class TDigestPercentilesTests extends AbstractNumericTestCase { Percentiles percentiles = global.getAggregations().get("percentiles"); assertThat(percentiles, notNullValue()); assertThat(percentiles.getName(), equalTo("percentiles")); - assertThat((Percentiles) 
global.getProperty("percentiles"), sameInstance(percentiles)); - + assertThat(global.getProperty("percentiles"), sameInstance(percentiles)); } @Override @@ -222,9 +219,12 @@ public class TDigestPercentilesTests extends AbstractNumericTestCase { final double[] pcts = randomPercentiles(); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(randomCompression(percentiles("percentiles")) -.field("value").script(new Script("_value - 1")) - .percentiles(pcts)) + .addAggregation( + randomCompression( + percentiles("percentiles")) + .field("value") + .script(new Script("_value - 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap())) + .percentiles(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -240,10 +240,12 @@ public class TDigestPercentilesTests extends AbstractNumericTestCase { final double[] pcts = randomPercentiles(); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(randomCompression(percentiles("percentiles")) -.field("value") - .script(new Script("_value - dec", ScriptType.INLINE, null, params)) - .percentiles(pcts)) + .addAggregation( + randomCompression( + percentiles("percentiles")) + .field("value") + .script(new Script("_value - dec", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params)) + .percentiles(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -257,9 +259,7 @@ public class TDigestPercentilesTests extends AbstractNumericTestCase { final double[] pcts = randomPercentiles(); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(randomCompression(percentiles("percentiles")) - .field("values") - .percentiles(pcts)) + .addAggregation(randomCompression(percentiles("percentiles")).field("values").percentiles(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -273,9 +273,12 @@ public class TDigestPercentilesTests extends AbstractNumericTestCase { final double[] pcts = randomPercentiles(); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(randomCompression(percentiles("percentiles")) -.field("values").script(new Script("_value - 1")) - .percentiles(pcts)) + .addAggregation( + randomCompression( + percentiles("percentiles")) + .field("values") + .script(new Script("_value - 1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap())) + .percentiles(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -288,9 +291,12 @@ public class TDigestPercentilesTests extends AbstractNumericTestCase { final double[] pcts = randomPercentiles(); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(randomCompression(percentiles("percentiles")) - .field("values").script(new Script("_value * -1")) - .percentiles(pcts)) + .addAggregation( + randomCompression( + percentiles("percentiles")) + .field("values") + .script(new Script("_value * -1", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap())) + .percentiles(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -306,10 +312,12 @@ public class TDigestPercentilesTests extends AbstractNumericTestCase { final double[] pcts = randomPercentiles(); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(randomCompression(percentiles("percentiles")) -.field("values") - .script(new Script("_value - 
dec", ScriptType.INLINE, null, params)) - .percentiles(pcts)) + .addAggregation( + randomCompression( + percentiles("percentiles")) + .field("values") + .script(new Script("_value - dec", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params)) + .percentiles(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -320,12 +328,15 @@ public class TDigestPercentilesTests extends AbstractNumericTestCase { @Override public void testScriptSingleValued() throws Exception { + Script script = new Script("doc['value'].value", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()); final double[] pcts = randomPercentiles(); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(randomCompression(percentiles("percentiles")) -.script(new Script("doc['value'].value")) - .percentiles(pcts)) + .addAggregation( + randomCompression( + percentiles("percentiles")) + .script(script) + .percentiles(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -338,13 +349,17 @@ public class TDigestPercentilesTests extends AbstractNumericTestCase { public void testScriptSingleValuedWithParams() throws Exception { Map params = new HashMap<>(); params.put("dec", 1); + + Script script = new Script("doc['value'].value - dec", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, params); + final double[] pcts = randomPercentiles(); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(randomCompression(percentiles("percentiles")) - .script( - new Script("doc['value'].value - dec", ScriptType.INLINE, null, params)) - .percentiles(pcts)) + .addAggregation( + randomCompression( + percentiles("percentiles")) + .script(script) + .percentiles(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -356,11 +371,15 @@ public class TDigestPercentilesTests extends AbstractNumericTestCase { @Override public void testScriptMultiValued() throws Exception { final double[] pcts = randomPercentiles(); + Script script = new Script("doc['values'].values", ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, emptyMap()); + SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(randomCompression(percentiles("percentiles")) -.script(new Script("doc['values'].values")) - .percentiles(pcts)) + .addAggregation( + randomCompression( + percentiles("percentiles")) + .script(script) + .percentiles(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); @@ -371,16 +390,16 @@ public class TDigestPercentilesTests extends AbstractNumericTestCase { @Override public void testScriptMultiValuedWithParams() throws Exception { - Map params = new HashMap<>(); - params.put("dec", 1); + Script script = AggregationTestScriptsPlugin.DECREMENT_ALL_VALUES; + final double[] pcts = randomPercentiles(); SearchResponse searchResponse = client().prepareSearch("idx") .setQuery(matchAllQuery()) - .addAggregation(randomCompression(percentiles("percentiles")) - .script(new Script( - "List values = doc['values'].values; double[] res = new double[values.size()]; for (int i = 0; i < res.length; i++) { res[i] = values.get(i) - dec; }; return res;", - ScriptType.INLINE, null, params)) - .percentiles(pcts)) + .addAggregation( + randomCompression( + percentiles("percentiles")) + .script(script) + .percentiles(pcts)) .execute().actionGet(); assertHitCount(searchResponse, 10); diff --git 
a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java index 84455c231aa..bfd4ed8ada9 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.MockScriptEngine; +import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchHit; @@ -54,6 +55,8 @@ import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.List; +import java.util.Map; +import java.util.function.Function; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.smileBuilder; @@ -92,7 +95,14 @@ public class TopHitsIT extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return Collections.singleton(MockScriptEngine.TestPlugin.class); + return Collections.singleton(CustomScriptPlugin.class); + } + + public static class CustomScriptPlugin extends MockScriptPlugin { + @Override + protected Map, Object>> pluginScripts() { + return Collections.emptyMap(); + } } public static String randomExecutionHint() { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesMethodTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesMethodTests.java index 36c4caae12d..97d5cf1f9ee 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesMethodTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/PercentilesMethodTests.java @@ -38,14 +38,14 @@ public class PercentilesMethodTests extends ESTestCase { public void testwriteTo() throws Exception { try (BytesStreamOutput out = new BytesStreamOutput()) { PercentilesMethod.TDIGEST.writeTo(out); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(in.readVInt(), equalTo(0)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { PercentilesMethod.HDR.writeTo(out); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(in.readVInt(), equalTo(1)); } } @@ -54,13 +54,13 @@ public class PercentilesMethodTests extends ESTestCase { public void testReadFrom() throws Exception { try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(0); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(PercentilesMethod.readFromStream(in), equalTo(PercentilesMethod.TDIGEST)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { out.writeVInt(1); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(PercentilesMethod.readFromStream(in), equalTo(PercentilesMethod.HDR)); } } @@ -69,7 +69,7 @@ public class PercentilesMethodTests extends ESTestCase { public void testInvalidReadFrom() throws Exception { try (BytesStreamOutput out = new BytesStreamOutput()) { 
out.writeVInt(randomIntBetween(2, Integer.MAX_VALUE)); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { PercentilesMethod.readFromStream(in); fail("Expected IOException"); } catch(IOException e) { diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BucketScriptTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptIT.java similarity index 84% rename from modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BucketScriptTests.java rename to core/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptIT.java index e481e2f59ae..220dfd29817 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BucketScriptTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptIT.java @@ -17,23 +17,21 @@ * under the License. */ -package org.elasticsearch.messy.tests; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; -import org.elasticsearch.script.groovy.GroovyPlugin; -import org.elasticsearch.script.groovy.GroovyScriptEngineService; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram; import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram.Bucket; import org.elasticsearch.search.aggregations.metrics.sum.Sum; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; -import org.elasticsearch.search.aggregations.pipeline.SimpleValue; import org.elasticsearch.test.ESIntegTestCase; import java.io.IOException; @@ -43,18 +41,20 @@ import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.function.Function; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.bucketScript; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @ESIntegTestCase.SuiteScopeTestCase -public class BucketScriptTests extends ESIntegTestCase { +public class BucketScriptIT extends ESIntegTestCase { private static final String FIELD_1_NAME = "field1"; private static final String FIELD_2_NAME = "field2"; @@ -68,7 +68,54 @@ public class BucketScriptTests extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return Collections.singleton(GroovyPlugin.class); + return Collections.singleton(CustomScriptPlugin.class); + } + + public static class CustomScriptPlugin extends MockScriptPlugin { + + @Override + protected Map, Object>> pluginScripts() { + Map, Object>> scripts = new 
HashMap<>(); + + scripts.put("_value0 + _value1 + _value2", vars -> { + double value0 = (double) vars.get("_value0"); + double value1 = (double) vars.get("_value1"); + double value2 = (double) vars.get("_value2"); + return value0 + value1 + value2; + }); + + scripts.put("_value0 + _value1 / _value2", vars -> { + double value0 = (double) vars.get("_value0"); + double value1 = (double) vars.get("_value1"); + double value2 = (double) vars.get("_value2"); + return value0 + value1 / value2; + }); + + scripts.put("_value0", vars -> vars.get("_value0")); + + scripts.put("foo + bar + baz", vars -> { + double foo = (double) vars.get("foo"); + double bar = (double) vars.get("bar"); + double baz = (double) vars.get("baz"); + return foo + bar + baz; + }); + + scripts.put("(_value0 + _value1 + _value2) * factor", vars -> { + double value0 = (double) vars.get("_value0"); + double value1 = (double) vars.get("_value1"); + double value2 = (double) vars.get("_value2"); + return (value0 + value1 + value2) * (int) vars.get("factor"); + }); + + scripts.put("my_script", vars -> { + double value0 = (double) vars.get("_value0"); + double value1 = (double) vars.get("_value1"); + double value2 = (double) vars.get("_value2"); + return value0 + value1 + value2; + }); + + return scripts; + } } @Override @@ -86,11 +133,6 @@ public class BucketScriptTests extends ESIntegTestCase { builders.add(client().prepareIndex("idx", "type").setSource(newDocBuilder())); } - client().admin().cluster().preparePutStoredScript() - .setId("my_script") - .setScriptLang(GroovyScriptEngineService.NAME) - .setSource(new BytesArray("{ \"script\": \"_value0 + _value1 + _value2\" }")).get(); - indexRandom(true, builders); ensureSearchable(); } @@ -117,8 +159,10 @@ public class BucketScriptTests extends ESIntegTestCase { .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) .subAggregation( - bucketScript("seriesArithmetic", new Script("_value0 + _value1 + _value2", ScriptType.INLINE, null, null) - , "field2Sum", "field3Sum", "field4Sum"))).execute().actionGet(); + bucketScript("seriesArithmetic", + new Script("_value0 + _value1 + _value2", ScriptType.INLINE, CustomScriptPlugin.NAME, null) + , "field2Sum", "field3Sum", "field4Sum"))) + .execute().actionGet(); assertSearchResponse(response); @@ -161,8 +205,10 @@ public class BucketScriptTests extends ESIntegTestCase { .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) .subAggregation( - bucketScript("seriesArithmetic", new Script("_value0 + _value1 / _value2", ScriptType.INLINE, null, null), - "field2Sum", "field3Sum", "field4Sum"))).execute().actionGet(); + bucketScript("seriesArithmetic", + new Script("_value0 + _value1 / _value2", ScriptType.INLINE, CustomScriptPlugin.NAME, null), + "field2Sum", "field3Sum", "field4Sum"))) + .execute().actionGet(); assertSearchResponse(response); @@ -203,8 +249,10 @@ public class BucketScriptTests extends ESIntegTestCase { .interval(interval) .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) .subAggregation( - bucketScript("seriesArithmetic", new Script("_value0", ScriptType.INLINE, null, null), - "field2Sum"))).execute().actionGet(); + bucketScript("seriesArithmetic", + new Script("_value0", ScriptType.INLINE, CustomScriptPlugin.NAME, null), + "field2Sum"))) + .execute().actionGet(); assertSearchResponse(response); @@ -246,7 +294,8 @@ public class BucketScriptTests extends ESIntegTestCase { .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) 
.subAggregation( bucketScript("seriesArithmetic", bucketsPathsMap, - new Script("foo + bar + baz", ScriptType.INLINE, null, null)))).execute().actionGet(); + new Script("foo + bar + baz", ScriptType.INLINE, CustomScriptPlugin.NAME, null)))) + .execute().actionGet(); assertSearchResponse(response); @@ -281,6 +330,9 @@ public class BucketScriptTests extends ESIntegTestCase { public void testInlineScriptWithParams() { Map params = new HashMap<>(); params.put("factor", 3); + + Script script = new Script("(_value0 + _value1 + _value2) * factor", ScriptType.INLINE, CustomScriptPlugin.NAME, params); + SearchResponse response = client() .prepareSearch("idx") .addAggregation( @@ -290,9 +342,8 @@ public class BucketScriptTests extends ESIntegTestCase { .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) - .subAggregation( - bucketScript("seriesArithmetic", new Script("(_value0 + _value1 + _value2) * factor", ScriptType.INLINE, null, params), - "field2Sum", "field3Sum", "field4Sum"))).execute().actionGet(); + .subAggregation(bucketScript("seriesArithmetic", script, "field2Sum", "field3Sum", "field4Sum"))) + .execute().actionGet(); assertSearchResponse(response); @@ -335,8 +386,10 @@ public class BucketScriptTests extends ESIntegTestCase { .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) .subAggregation( - bucketScript("seriesArithmetic", new Script("_value0 + _value1 + _value2", ScriptType.INLINE, null, null), - "field2Sum", "field3Sum", "field4Sum").gapPolicy(GapPolicy.INSERT_ZEROS))).execute().actionGet(); + bucketScript("seriesArithmetic", + new Script("_value0 + _value1 + _value2", ScriptType.INLINE, CustomScriptPlugin.NAME, null), + "field2Sum", "field3Sum", "field4Sum").gapPolicy(GapPolicy.INSERT_ZEROS))) + .execute().actionGet(); assertSearchResponse(response); @@ -370,7 +423,13 @@ public class BucketScriptTests extends ESIntegTestCase { } } - public void testIndexedScript() { + public void testStoredScript() { + assertAcked(client().admin().cluster().preparePutStoredScript() + .setId("my_script") + .setScriptLang(CustomScriptPlugin.NAME) + // Script source is not interpreted but it references a pre-defined script from CustomScriptPlugin + .setSource(new BytesArray("{ \"script\": \"my_script\" }"))); + SearchResponse response = client() .prepareSearch("idx") .addAggregation( @@ -381,7 +440,8 @@ public class BucketScriptTests extends ESIntegTestCase { .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) .subAggregation( - bucketScript("seriesArithmetic", new Script("my_script", ScriptType.STORED, null, null), + bucketScript("seriesArithmetic", + new Script("my_script", ScriptType.STORED, CustomScriptPlugin.NAME, null), "field2Sum", "field3Sum", "field4Sum"))).execute().actionGet(); assertSearchResponse(response); @@ -425,7 +485,8 @@ public class BucketScriptTests extends ESIntegTestCase { .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) .subAggregation( - bucketScript("seriesArithmetic", new Script("_value0 + _value1 + _value2", ScriptType.INLINE, null, null), + bucketScript("seriesArithmetic", + new Script("_value0 + _value1 + _value2", ScriptType.INLINE, CustomScriptPlugin.NAME, null), "field2Sum", "field3Sum", "field4Sum"))) .execute().actionGet(); @@ -448,7 +509,8 @@ public class BucketScriptTests extends 
ESIntegTestCase { .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) .subAggregation( - bucketScript("seriesArithmetic", new Script("_value0 + _value1 + _value2", ScriptType.INLINE, null, null), + bucketScript("seriesArithmetic", + new Script("_value0 + _value1 + _value2", ScriptType.INLINE, CustomScriptPlugin.NAME, null), "field2Sum", "field3Sum", "field4Sum"))).execute().actionGet(); assertSearchResponse(response); diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BucketSelectorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorIT.java similarity index 68% rename from modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BucketSelectorTests.java rename to core/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorIT.java index 5b796a92d7f..64dc7e50bcc 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BucketSelectorTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorIT.java @@ -17,17 +17,16 @@ * under the License. */ -package org.elasticsearch.messy.tests; +package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; -import org.elasticsearch.script.groovy.GroovyPlugin; -import org.elasticsearch.script.groovy.GroovyScriptEngineService; import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram; @@ -43,12 +42,14 @@ import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.function.Function; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.bucketSelector; import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.derivative; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -57,7 +58,7 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @ESIntegTestCase.SuiteScopeTestCase -public class BucketSelectorTests extends ESIntegTestCase { +public class BucketSelectorIT extends ESIntegTestCase { private static final String FIELD_1_NAME = "field1"; private static final String FIELD_2_NAME = "field2"; @@ -71,7 +72,69 @@ public class BucketSelectorTests extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return Collections.singleton(GroovyPlugin.class); + return Collections.singleton(CustomScriptPlugin.class); + } + + public static class CustomScriptPlugin 
+ + @Override + protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() { + Map<String, Function<Map<String, Object>, Object>> scripts = new HashMap<>(); + + scripts.put("Double.isNaN(_value0) ? false : (_value0 + _value1 > 100)", vars -> { + double value0 = (double) vars.get("_value0"); + double value1 = (double) vars.get("_value1"); + return Double.isNaN(value0) ? false : (value0 + value1 > 100); + }); + + scripts.put("Double.isNaN(_value0) ? true : (_value0 < 10000)", vars -> { + double value0 = (double) vars.get("_value0"); + return Double.isNaN(value0) ? true : (value0 < 10000); + }); + + scripts.put("Double.isNaN(_value0) ? false : (_value0 > 10000)", vars -> { + double value0 = (double) vars.get("_value0"); + return Double.isNaN(value0) ? false : (value0 > 10000); + }); + + scripts.put("Double.isNaN(_value0) ? false : (_value0 < _value1)", vars -> { + double value0 = (double) vars.get("_value0"); + double value1 = (double) vars.get("_value1"); + return Double.isNaN(value0) ? false : (value0 < value1); + }); + + scripts.put("Double.isNaN(_value0) ? false : (_value0 > 100)", vars -> { + double value0 = (double) vars.get("_value0"); + return Double.isNaN(value0) ? false : (value0 > 100); + }); + + scripts.put("Double.isNaN(my_value1) ? false : (my_value1 + my_value2 > 100)", vars -> { + double myValue1 = (double) vars.get("my_value1"); + double myValue2 = (double) vars.get("my_value2"); + return Double.isNaN(myValue1) ? false : (myValue1 + myValue2 > 100); + }); + + scripts.put("Double.isNaN(_value0) ? false : (_value0 + _value1 > threshold)", vars -> { + double value0 = (double) vars.get("_value0"); + double value1 = (double) vars.get("_value1"); + int threshold = (int) vars.get("threshold"); + return Double.isNaN(value0) ? false : (value0 + value1 > threshold); + }); + + scripts.put("_value0 + _value1 > 100", vars -> { + double value0 = (double) vars.get("_value0"); + double value1 = (double) vars.get("_value1"); + return (value0 + value1 > 100); + }); + + scripts.put("my_script", vars -> { + double value0 = (double) vars.get("_value0"); + double value1 = (double) vars.get("_value1"); + return Double.isNaN(value0) ? false : (value0 + value1 > 100); + }); + + return scripts; + } } @Override @@ -94,9 +157,6 @@ public class BucketSelectorTests extends ESIntegTestCase { builders.add(client().prepareIndex("idx_with_gaps", "type").setSource(newDocBuilder(3, 1, 0, 0))); builders.add(client().prepareIndex("idx_with_gaps", "type").setSource(newDocBuilder(3, 3, 0, 0))); - client().admin().cluster().preparePutStoredScript().setId("my_script").setScriptLang(GroovyScriptEngineService.NAME) - .setSource(new BytesArray("{ \"script\": \"Double.isNaN(_value0) ? false : (_value0 + _value1 > 100)\" }")).get(); - indexRandom(true, builders); ensureSearchable(); } @@ -118,12 +178,13 @@ public class BucketSelectorTests extends ESIntegTestCase { } public void testInlineScript() { + Script script = + new Script("Double.isNaN(_value0) ? false : (_value0 + _value1 > 100)", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + SearchResponse response = client().prepareSearch("idx") .addAggregation(histogram("histo").field(FIELD_1_NAME).interval(interval) .subAggregation(sum("field2Sum").field(FIELD_2_NAME)).subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation(bucketSelector("bucketSelector", - new Script("Double.isNaN(_value0) ?
false : (_value0 + _value1 > 100)", ScriptType.INLINE, null, null), - "field2Sum", "field3Sum"))) + .subAggregation(bucketSelector("bucketSelector", script, "field2Sum", "field3Sum"))) .execute().actionGet(); assertSearchResponse(response); @@ -146,6 +207,8 @@ public class BucketSelectorTests extends ESIntegTestCase { } public void testInlineScriptNoBucketsPruned() { + Script script = new Script("Double.isNaN(_value0) ? true : (_value0 < 10000)", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + SearchResponse response = client() .prepareSearch("idx") .addAggregation( @@ -154,10 +217,8 @@ public class BucketSelectorTests extends ESIntegTestCase { .interval(interval) .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation( - bucketSelector("bucketSelector", new Script("Double.isNaN(_value0) ? true : (_value0 < 10000)", - ScriptType.INLINE, null, null), "field2Sum", "field3Sum"))).execute() - .actionGet(); + .subAggregation(bucketSelector("bucketSelector", script, "field2Sum", "field3Sum"))) + .execute().actionGet(); assertSearchResponse(response); @@ -179,6 +240,8 @@ public class BucketSelectorTests extends ESIntegTestCase { } public void testInlineScriptNoBucketsLeft() { + Script script = new Script("Double.isNaN(_value0) ? false : (_value0 > 10000)", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + SearchResponse response = client() .prepareSearch("idx") .addAggregation( @@ -187,9 +250,8 @@ public class BucketSelectorTests extends ESIntegTestCase { .interval(interval) .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation( - bucketSelector("bucketSelector", new Script("Double.isNaN(_value0) ? false : (_value0 > 10000)", - ScriptType.INLINE, null, null), "field2Sum", "field3Sum"))).execute().actionGet(); + .subAggregation(bucketSelector("bucketSelector", script, "field2Sum", "field3Sum"))) + .execute().actionGet(); assertSearchResponse(response); @@ -201,6 +263,8 @@ public class BucketSelectorTests extends ESIntegTestCase { } public void testInlineScript2() { + Script script = new Script("Double.isNaN(_value0) ? false : (_value0 < _value1)", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + SearchResponse response = client() .prepareSearch("idx") .addAggregation( @@ -209,9 +273,8 @@ public class BucketSelectorTests extends ESIntegTestCase { .interval(interval) .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation( - bucketSelector("bucketSelector", new Script("Double.isNaN(_value0) ? false : (_value0 < _value1)", - ScriptType.INLINE, null, null), "field2Sum", "field3Sum"))).execute().actionGet(); + .subAggregation(bucketSelector("bucketSelector", script, "field2Sum", "field3Sum"))) + .execute().actionGet(); assertSearchResponse(response); @@ -233,6 +296,8 @@ public class BucketSelectorTests extends ESIntegTestCase { } public void testInlineScriptSingleVariable() { + Script script = new Script("Double.isNaN(_value0) ? false : (_value0 > 100)", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + SearchResponse response = client() .prepareSearch("idx") .addAggregation( @@ -240,9 +305,8 @@ public class BucketSelectorTests extends ESIntegTestCase { .field(FIELD_1_NAME) .interval(interval) .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) - .subAggregation( - bucketSelector("bucketSelector", new Script("Double.isNaN(_value0) ? 
false : (_value0 > 100)", - ScriptType.INLINE,null, null), "field2Sum"))).execute().actionGet(); + .subAggregation(bucketSelector("bucketSelector", script, "field2Sum"))) + .execute().actionGet(); assertSearchResponse(response); @@ -261,15 +325,21 @@ public class BucketSelectorTests extends ESIntegTestCase { } public void testInlineScriptNamedVars() { + Script script = new Script("Double.isNaN(my_value1) ? false : (my_value1 + my_value2 > 100)", ScriptType.INLINE, + CustomScriptPlugin.NAME, null); + Map<String, String> bucketPathsMap = new HashMap<>(); bucketPathsMap.put("my_value1", "field2Sum"); bucketPathsMap.put("my_value2", "field3Sum"); SearchResponse response = client().prepareSearch("idx") - .addAggregation(histogram("histo").field(FIELD_1_NAME).interval(interval) - .subAggregation(sum("field2Sum").field(FIELD_2_NAME)).subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation(bucketSelector("bucketSelector", bucketPathsMap, new Script( - "Double.isNaN(my_value1) ? false : (my_value1 + my_value2 > 100)", ScriptType.INLINE, null, null)))) + .addAggregation( + histogram("histo") + .field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) + .subAggregation(bucketSelector("bucketSelector", bucketPathsMap, script))) .execute().actionGet(); assertSearchResponse(response); @@ -292,13 +362,17 @@ public class BucketSelectorTests extends ESIntegTestCase { } public void testInlineScriptWithParams() { - Map<String, Object> params = new HashMap<>(); - params.put("threshold", 100); - SearchResponse response = client().prepareSearch("idx").addAggregation(histogram("histo").field(FIELD_1_NAME).interval(interval) - .subAggregation(sum("field2Sum").field(FIELD_2_NAME)).subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation(bucketSelector("bucketSelector", - new Script("Double.isNaN(_value0) ? false : (_value0 + _value1 > threshold)", ScriptType.INLINE, null, params), - "field2Sum", "field3Sum"))) + Script script = new Script("Double.isNaN(_value0) ?
false : (_value0 + _value1 > threshold)", ScriptType.INLINE, + CustomScriptPlugin.NAME, Collections.singletonMap("threshold", 100)); + + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + histogram("histo") + .field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) + .subAggregation(bucketSelector("bucketSelector", script, "field2Sum", "field3Sum"))) .execute().actionGet(); assertSearchResponse(response); @@ -321,13 +395,17 @@ public class BucketSelectorTests extends ESIntegTestCase { } public void testInlineScriptInsertZeros() { + Script script = new Script("_value0 + _value1 > 100", ScriptType.INLINE, CustomScriptPlugin.NAME, null); + SearchResponse response = client().prepareSearch("idx") .addAggregation( - histogram("histo").field(FIELD_1_NAME).interval(interval).subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + histogram("histo") + .field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation(bucketSelector("bucketSelector", - new Script("_value0 + _value1 > 100", ScriptType.INLINE, null, null), "field2Sum", "field3Sum") - .gapPolicy(GapPolicy.INSERT_ZEROS))) + .subAggregation(bucketSelector("bucketSelector", script, "field2Sum", "field3Sum") + .gapPolicy(GapPolicy.INSERT_ZEROS))) + .execute().actionGet(); assertSearchResponse(response); @@ -349,7 +427,15 @@ public class BucketSelectorTests extends ESIntegTestCase { } } - public void testIndexedScript() { + public void testStoredScript() { + assertAcked(client().admin().cluster().preparePutStoredScript() + .setId("my_script") + .setScriptLang(CustomScriptPlugin.NAME) + // Source is not interpreted but my_script is defined in CustomScriptPlugin + .setSource(new BytesArray("{ \"script\": \"Double.isNaN(_value0) ? false : (_value0 + _value1 > 100)\" }"))); + + Script script = new Script("my_script", ScriptType.STORED, CustomScriptPlugin.NAME, null); + SearchResponse response = client() .prepareSearch("idx") .addAggregation( @@ -358,9 +444,8 @@ public class BucketSelectorTests extends ESIntegTestCase { .interval(interval) .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation( - bucketSelector("bucketSelector", new Script("my_script", ScriptType.STORED, null, null), - "field2Sum", "field3Sum"))).execute().actionGet(); + .subAggregation(bucketSelector("bucketSelector", script, "field2Sum", "field3Sum"))) + .execute().actionGet(); assertSearchResponse(response); @@ -382,12 +467,17 @@ public class BucketSelectorTests extends ESIntegTestCase { } public void testUnmapped() throws Exception { + Script script = new Script("Double.isNaN(_value0) ? false : (_value0 + _value1 > 100)", ScriptType.INLINE, + CustomScriptPlugin.NAME, null); + SearchResponse response = client().prepareSearch("idx_unmapped") - .addAggregation(histogram("histo").field(FIELD_1_NAME).interval(interval) - .subAggregation(sum("field2Sum").field(FIELD_2_NAME)).subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation(bucketSelector("bucketSelector", - new Script("Double.isNaN(_value0) ? false : (_value0 + _value1 > 100)", ScriptType.INLINE, null, null), - "field2Sum", "field3Sum"))) + .addAggregation( + histogram("histo") + .field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) + .subAggregation(bucketSelector("bucketSelector", script, "field2Sum", "field3Sum"))) .execute().actionGet(); assertSearchResponse(response);
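testStoredScript above plays the same indirection twice: the stored script's body is never compiled by the mock engine, it merely resolves to a key in pluginScripts(), and the search then refers to the script by id. The flow, condensed — a sketch assuming an ESIntegTestCase subclass where client() and assertAcked are in scope, with the same names used in the test above:

// Register the stored script under an id, bound to the mock engine's lang.
assertAcked(client().admin().cluster().preparePutStoredScript()
        .setId("my_script")
        .setScriptLang(CustomScriptPlugin.NAME)
        // The JSON body is opaque to the mock engine; "my_script" is only a map key.
        .setSource(new BytesArray("{ \"script\": \"my_script\" }")));
// Reference it by id; the lang argument still selects the mock engine.
Script stored = new Script("my_script", ScriptType.STORED, CustomScriptPlugin.NAME, null);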
@@ -399,12 +489,17 @@ public class BucketSelectorTests extends ESIntegTestCase { } public void testPartiallyUnmapped() throws Exception { + Script script = new Script("Double.isNaN(_value0) ? false : (_value0 + _value1 > 100)", ScriptType.INLINE, + CustomScriptPlugin.NAME, null); + SearchResponse response = client().prepareSearch("idx", "idx_unmapped") - .addAggregation(histogram("histo").field(FIELD_1_NAME).interval(interval) - .subAggregation(sum("field2Sum").field(FIELD_2_NAME)).subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation(bucketSelector("bucketSelector", - new Script("Double.isNaN(_value0) ? false : (_value0 + _value1 > 100)", ScriptType.INLINE, null, null), - "field2Sum", "field3Sum"))) + .addAggregation( + histogram("histo") + .field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) + .subAggregation(bucketSelector("bucketSelector", script, "field2Sum", "field3Sum"))) .execute().actionGet(); assertSearchResponse(response); @@ -428,9 +523,18 @@ public class BucketSelectorTests extends ESIntegTestCase { public void testEmptyBuckets() { SearchResponse response = client().prepareSearch("idx_with_gaps") - .addAggregation(histogram("histo").field(FIELD_1_NAME).interval(1) - .subAggregation(histogram("inner_histo").field(FIELD_1_NAME).interval(1).extendedBounds(new ExtendedBounds(1L, 4L)) - .minDocCount(0).subAggregation(derivative("derivative", "_count").gapPolicy(GapPolicy.INSERT_ZEROS)))) + .addAggregation( + histogram("histo") + .field(FIELD_1_NAME) + .interval(1) + .subAggregation( + histogram("inner_histo") + .field(FIELD_1_NAME) + .interval(1) + .extendedBounds(new ExtendedBounds(1L, 4L)) + .minDocCount(0) + .subAggregation(derivative("derivative", "_count") + .gapPolicy(GapPolicy.INSERT_ZEROS)))) .execute().actionGet(); assertSearchResponse(response); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeIT.java index 393e1bbfdea..f1ee16855a7 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeIT.java @@ -619,7 +619,6 @@ public class DerivativeIT extends ESIntegTestCase { public void testAvgMovavgDerivNPE() throws Exception { createIndex("movavg_npe"); - ensureYellow("movavg_npe"); for (int i = 0; i < 10; i++) { Integer value = i; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java index 64dd0f87caa..b27d4a5a4ce 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java @@ -1,5 +1,3 @@ -package org.elasticsearch.search.aggregations.pipeline; - /* * Licensed to Elasticsearch under one or more contributor *
license agreements. See the NOTICE file distributed with @@ -19,7 +17,8 @@ package org.elasticsearch.search.aggregations.pipeline; * under the License. */ -import org.elasticsearch.ElasticsearchException; +package org.elasticsearch.search.aggregations.pipeline; + import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; @@ -45,7 +44,6 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.percentilesBucket; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.core.IsNull.notNullValue; diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/ExtendedStatsBucketTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/ExtendedStatsBucketTests.java index e1441b0dc54..390501d2002 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/ExtendedStatsBucketTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/ExtendedStatsBucketTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.search.aggregations.pipeline.bucketmetrics; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.ExtendedStatsBucketPipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.ExtendedStatsBucketPipelineAggregationBuilder; import static org.hamcrest.Matchers.equalTo; @@ -51,7 +50,7 @@ public class ExtendedStatsBucketTests extends AbstractBucketMetricsTestCase<ExtendedStatsBucketPipelineAggregationBuilder> + IncludeExclude inexcl = new IncludeExclude( + new TreeSet<>(Collections.singleton(new BytesRef("foo"))), + null); + OrdinalsFilter filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW); + LongBitSet acceptedOrds = filter.acceptedGlobalOrdinals(DocValues.emptySortedSet()); + assertEquals(0, acceptedOrds.length()); + + inexcl = new IncludeExclude( + null, + new TreeSet<>(Collections.singleton(new BytesRef("foo")))); + filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW); + acceptedOrds = filter.acceptedGlobalOrdinals(DocValues.emptySortedSet()); + assertEquals(0, acceptedOrds.length()); + } + + public void testSingleTermWithOrds() throws IOException { + RandomAccessOrds ords = new RandomAccessOrds() { + + boolean consumed = true; + + @Override + public void setDocument(int docID) { + consumed = false; + } + + @Override + public long nextOrd() { + if (consumed) { + return SortedSetDocValues.NO_MORE_ORDS; + } else { + consumed = true; + return 0; + } + } + + @Override + public BytesRef lookupOrd(long ord) { + assertEquals(0, ord); + return new BytesRef("foo"); + } + + @Override + public long getValueCount() { + return 1; + } + + @Override + public long ordAt(int index) { + return 0; + } + + @Override + public int cardinality() { + return 1; + } + }; + IncludeExclude inexcl = new IncludeExclude( + new TreeSet<>(Collections.singleton(new BytesRef("foo"))), + null); + OrdinalsFilter filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW); + LongBitSet acceptedOrds = filter.acceptedGlobalOrdinals(ords); + assertEquals(1, acceptedOrds.length()); + assertTrue(acceptedOrds.get(0)); + + inexcl = new IncludeExclude( + new TreeSet<>(Collections.singleton(new BytesRef("bar"))), + null); + filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW); + acceptedOrds = filter.acceptedGlobalOrdinals(ords); + assertEquals(1, acceptedOrds.length()); + assertFalse(acceptedOrds.get(0)); + + inexcl = new IncludeExclude( + new TreeSet<>(Collections.singleton(new BytesRef("foo"))), + new TreeSet<>(Collections.singleton(new BytesRef("foo")))); + filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW); + acceptedOrds = filter.acceptedGlobalOrdinals(ords); + assertEquals(1, acceptedOrds.length()); + assertFalse(acceptedOrds.get(0)); + + inexcl = new IncludeExclude( + null, // means everything included + new TreeSet<>(Collections.singleton(new BytesRef("foo")))); + filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW); + acceptedOrds = filter.acceptedGlobalOrdinals(ords); + assertEquals(1, acceptedOrds.length()); + assertFalse(acceptedOrds.get(0)); + } + +}
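The new IncludeExcludeTests above pin down the ordinals path of terms filtering: an include/exclude set is compiled into an OrdinalsFilter, and acceptedGlobalOrdinals returns a bitset over the global ordinals. A trimmed sketch of the same calls — the imports for IncludeExclude and OrdinalsFilter are omitted because the file header did not survive in this diff:

import java.io.IOException;
import java.util.Collections;
import java.util.TreeSet;

import org.apache.lucene.index.DocValues;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LongBitSet;
import org.elasticsearch.search.DocValueFormat;

class IncludeExcludeSketch {
    static boolean acceptsNothingOnEmptyOrds(BytesRef term) throws IOException {
        IncludeExclude include = new IncludeExclude(
                new TreeSet<>(Collections.singleton(term)), null);
        OrdinalsFilter filter = include.convertToOrdinalsFilter(DocValueFormat.RAW);
        // Each set bit marks a global ordinal whose term survives the filter;
        // with no ordinals at all the resulting bitset has length 0.
        LongBitSet accepted = filter.acceptedGlobalOrdinals(DocValues.emptySortedSet());
        return accepted.length() == 0;
    }
}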
diff --git a/core/src/test/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java b/core/src/test/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java index 98ae3241dbb..faa960cb589 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java @@ -23,12 +23,10 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.client.Client; import org.elasticsearch.common.Priority; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.search.SearchHits; import org.elasticsearch.test.ESIntegTestCase; -import org.hamcrest.Matchers; import java.util.ArrayList; import java.util.List; @@ -38,13 +36,13 @@ import java.util.concurrent.atomic.AtomicBoolean; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.formatShardStatus; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; @ESIntegTestCase.ClusterScope(minNumDataNodes = 2) public class SearchWhileRelocatingIT extends ESIntegTestCase { - public void testSearchAndRelocateConcurrentlyRanodmReplicas() throws Exception { + public void testSearchAndRelocateConcurrentlyRandomReplicas() throws Exception { testSearchAndRelocateConcurrently(randomIntBetween(0, 1)); } @@ -67,25 +65,28 @@ public class SearchWhileRelocatingIT extends ESIntegTestCase { final int numIters = scaledRandomIntBetween(5, 20); for (int i = 0; i < numIters; i++) { final AtomicBoolean stop = new AtomicBoolean(false); - final List<Throwable> thrownExceptions = new CopyOnWriteArrayList<>(); - final List<Throwable> nonCriticalExceptions = new CopyOnWriteArrayList<>(); + final List<String> nonCriticalExceptions = new CopyOnWriteArrayList<>(); Thread[] threads = new Thread[scaledRandomIntBetween(1, 3)]; for (int j = 0; j < threads.length; j++) { threads[j] = new Thread() { @Override public void run() {
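The rewritten loop below replaces the old criticalException bookkeeping with a direct check: a short hit count with all shards successful and no failures is recorded as a non-critical observation, anything else still fails the test. The decision, pulled out into a helper for clarity — a sketch, assuming the same SearchResponse accessors and the assertHitCount helper used in this file:

import java.util.List;

import org.elasticsearch.action.search.SearchResponse;

import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;

class HitCountTriageSketch {
    // Record a benign short count (stale cluster state, no shard failures); fail otherwise.
    static void check(SearchResponse sr, long numDocs, List<String> nonCritical) {
        if (sr.getHits().totalHits() != numDocs) {
            if (sr.getTotalShards() != sr.getSuccessfulShards() && sr.getFailedShards() == 0) {
                nonCritical.add("Count is " + sr.getHits().totalHits()
                        + " but " + numDocs + " was expected");
            } else {
                assertHitCount(sr, numDocs);
            }
        }
    }
}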
- boolean criticalException = true; try { while (!stop.get()) { SearchResponse sr = client().prepareSearch().setSize(numDocs).get(); - // if we did not search all shards but had no failures that is potentially fine - // if only the hit-count is wrong. this can happen if the cluster-state is behind when the - // request comes in. It's a small window but a known limitation. - // - criticalException = sr.getTotalShards() == sr.getSuccessfulShards() || sr.getFailedShards() > 0; - assertHitCount(sr, (numDocs)); - criticalException = true; + if (sr.getHits().totalHits() != numDocs) { + // if we did not search all shards but had no failures, that is potentially fine + // as long as only the hit count is wrong. This can happen if the cluster state is behind when the + // request comes in. It's a small window, but a known limitation. + if (sr.getTotalShards() != sr.getSuccessfulShards() && sr.getFailedShards() == 0) { + nonCriticalExceptions.add("Count is " + sr.getHits().totalHits() + " but " + numDocs + + " was expected. " + formatShardStatus(sr)); + } else { + assertHitCount(sr, numDocs); + } + } + final SearchHits sh = sr.getHits(); assertThat("Expected hits to be the same size as the actual hits array", sh.getTotalHits(), equalTo((long) (sh.getHits().length))); @@ -96,13 +97,7 @@ public class SearchWhileRelocatingIT extends ESIntegTestCase { // it's possible that all shards fail if we have a small number of shards. // with replicas this should not happen if (numberOfReplicas == 1 || !ex.getMessage().contains("all shards failed")) { - thrownExceptions.add(ex); - } - } catch (Throwable t) { - if (!criticalException) { - nonCriticalExceptions.add(t); - } else { - thrownExceptions.add(t); + throw ex; } } } @@ -120,21 +115,12 @@ public class SearchWhileRelocatingIT extends ESIntegTestCase { // this might time out on some machines if they are really busy and you hit lots of throttling ClusterHealthResponse resp = client().admin().cluster().prepareHealth().setWaitForYellowStatus().setWaitForRelocatingShards(0).setWaitForEvents(Priority.LANGUID).setTimeout("5m").get(); assertNoTimeout(resp); - if (!thrownExceptions.isEmpty() || !nonCriticalExceptions.isEmpty()) { - Client client = client(); - boolean postSearchOK = true; - String verified = "POST SEARCH OK"; + // if we hit only non-critical exceptions we make sure that the post search works + if (!nonCriticalExceptions.isEmpty()) { + logger.info("non-critical exceptions: {}", nonCriticalExceptions); for (int j = 0; j < 10; j++) { - if (client.prepareSearch().get().getHits().getTotalHits() != numDocs) { - verified = "POST SEARCH FAIL"; - postSearchOK = false; - break; - } + assertHitCount(client().prepareSearch().get(), numDocs); } - assertThat("numberOfReplicas: " + numberOfReplicas + " failed in iteration " + i + ", verification: " + verified, thrownExceptions, Matchers.emptyIterable()); - // if we hit only non-critical exceptions we only make sure that the post search works - logger.info("Non-CriticalExceptions: {}", nonCriticalExceptions); - assertThat("numberOfReplicas: " + numberOfReplicas + " failed in iteration " + i + ", verification: " + verified, postSearchOK, is(true)); } } } diff --git a/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java b/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java index bd1d6ed9795..bacc6d791d4 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java +++
b/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java @@ -54,7 +54,6 @@ public class TransportSearchFailuresIT extends ESIntegTestCase { public void testFailedSearchWithWrongQuery() throws Exception { logger.info("Start Testing failed search with wrong query"); assertAcked(prepareCreate("test", 1)); - ensureYellow(); NumShards test = getNumShards("test"); diff --git a/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java b/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java index f3a78b65d78..19a9b1c65f3 100644 --- a/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java @@ -87,6 +87,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; +import static java.util.Collections.emptyList; import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; import static org.elasticsearch.test.ClusterServiceUtils.setState; import static org.hamcrest.CoreMatchers.containsString; @@ -145,7 +146,7 @@ public class SearchSourceBuilderTests extends ESTestCase { bindMapperExtension(); } }, - new SearchModule(settings, namedWriteableRegistry) { + new SearchModule(settings, namedWriteableRegistry, false, emptyList()) { @Override protected void configureSearch() { // Skip me @@ -221,12 +222,12 @@ public class SearchSourceBuilderTests extends ESTestCase { for (int i = 0; i < fieldsSize; i++) { fields.add(randomAsciiOfLengthBetween(5, 50)); } - builder.fields(fields); + builder.storedFields(fields); } if (randomBoolean()) { int fieldDataFieldsSize = randomInt(25); for (int i = 0; i < fieldDataFieldsSize; i++) { - builder.fieldDataField(randomAsciiOfLengthBetween(5, 50)); + builder.docValueField(randomAsciiOfLengthBetween(5, 50)); } } if (randomBoolean()) { @@ -444,7 +445,7 @@ public class SearchSourceBuilderTests extends ESTestCase { SearchSourceBuilder testBuilder = createSearchSourceBuilder(); try (BytesStreamOutput output = new BytesStreamOutput()) { testBuilder.writeTo(output); - try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { + try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) { SearchSourceBuilder deserializedBuilder = new SearchSourceBuilder(in); assertEquals(deserializedBuilder, testBuilder); assertEquals(deserializedBuilder.hashCode(), testBuilder.hashCode()); @@ -484,7 +485,7 @@ public class SearchSourceBuilderTests extends ESTestCase { protected SearchSourceBuilder copyBuilder(SearchSourceBuilder builder) throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { builder.writeTo(output); - try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { + try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) { return new SearchSourceBuilder(in); } } diff --git a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java index f8ca1e1aaf7..68679e89ae6 100644 --- a/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java @@ -202,7 +202,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { refresh(); // TEST 
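Alongside the storedFields/docValueField renames, SearchSourceBuilderTests above also picks up the new stream plumbing: StreamInput.wrap(output.bytes()) becomes output.bytes().streamInput(). The round-trip idiom in its new form, as a sketch using only classes already imported in that test:

import java.io.IOException;

import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.search.builder.SearchSourceBuilder;

class RoundTripSketch {
    static SearchSourceBuilder copy(SearchSourceBuilder original,
                                    NamedWriteableRegistry registry) throws IOException {
        try (BytesStreamOutput output = new BytesStreamOutput()) {
            original.writeTo(output);
            // The input stream is now obtained from the BytesReference itself.
            try (StreamInput in = new NamedWriteableAwareStreamInput(
                    output.bytes().streamInput(), registry)) {
                return new SearchSourceBuilder(in);
            }
        }
    }
}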
FETCHING _parent from child - SearchResponse searchResponse = client().prepareSearch("test").setQuery(idsQuery("child").addIds("c1")).fields("_parent").execute() + SearchResponse searchResponse = client().prepareSearch("test").setQuery(idsQuery("child").addIds("c1")).storedFields("_parent").execute() .actionGet(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().totalHits(), equalTo(1L)); @@ -210,7 +210,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(searchResponse.getHits().getAt(0).field("_parent").value().toString(), equalTo("p1")); // TEST matching on parent - searchResponse = client().prepareSearch("test").setQuery(termQuery("_parent#parent", "p1")).fields("_parent").get(); + searchResponse = client().prepareSearch("test").setQuery(termQuery("_parent#parent", "p1")).storedFields("_parent").get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().totalHits(), equalTo(2L)); assertThat(searchResponse.getHits().getAt(0).id(), anyOf(equalTo("c1"), equalTo("c2"))); @@ -218,7 +218,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { assertThat(searchResponse.getHits().getAt(1).id(), anyOf(equalTo("c1"), equalTo("c2"))); assertThat(searchResponse.getHits().getAt(1).field("_parent").value().toString(), equalTo("p1")); - searchResponse = client().prepareSearch("test").setQuery(queryStringQuery("_parent#parent:p1")).fields("_parent").get(); + searchResponse = client().prepareSearch("test").setQuery(queryStringQuery("_parent#parent:p1")).storedFields("_parent").get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().totalHits(), equalTo(2L)); assertThat(searchResponse.getHits().getAt(0).id(), anyOf(equalTo("c1"), equalTo("c2"))); @@ -1394,7 +1394,7 @@ public class ChildQuerySearchIT extends ESIntegTestCase { SearchResponse scrollResponse = client().prepareSearch("test") .setScroll(TimeValue.timeValueSeconds(30)) .setSize(1) - .addField("_id") + .addStoredField("_id") .setQuery(query) .execute() .actionGet(); diff --git a/core/src/test/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java b/core/src/test/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java index 39df053c862..f2d92f62f1e 100644 --- a/core/src/test/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java +++ b/core/src/test/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java @@ -32,11 +32,10 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.termvectors.TermVectorsService; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.SearchHitField; -import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.SearchParseElement; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.internal.InternalSearchHit; import org.elasticsearch.search.internal.InternalSearchHitField; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.ESIntegTestCase; @@ -47,8 +46,10 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; +import java.util.List; import java.util.Map; +import static java.util.Collections.singletonList; import static java.util.Collections.singletonMap; import static org.elasticsearch.client.Requests.indexRequest; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; @@ 
-79,7 +80,6 @@ public class FetchSubPhasePluginIT extends ESIntegTestCase { .endObject() .endObject() .endObject().endObject()).execute().actionGet(); - client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForYellowStatus().execute().actionGet(); client().index( indexRequest("test").type("type1").id("1") @@ -99,13 +99,14 @@ public class FetchSubPhasePluginIT extends ESIntegTestCase { equalTo(1)); } - public static class FetchTermVectorsPlugin extends Plugin { - public void onModule(SearchModule searchModule) { - searchModule.registerFetchSubPhase(new TermVectorsFetchSubPhase()); + public static class FetchTermVectorsPlugin extends Plugin implements SearchPlugin { + @Override + public List<FetchSubPhase> getFetchSubPhases(FetchPhaseConstructionContext context) { + return singletonList(new TermVectorsFetchSubPhase()); } } - public final static class TermVectorsFetchSubPhase implements FetchSubPhase { + public static final class TermVectorsFetchSubPhase implements FetchSubPhase { public static final ContextFactory CONTEXT_FACTORY = new ContextFactory() { diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java index 155363f72a7..e7acb6ff149 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java @@ -86,7 +86,6 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { "type1", jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("test").field("type", "text") .endObject().startObject("loc").field("type", "geo_point").endObject().endObject().endObject().endObject())); - ensureYellow(); List<IndexRequestBuilder> indexBuilders = new ArrayList<>(); indexBuilders.add(client().prepareIndex() @@ -184,7 +183,6 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { "type1", jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("test").field("type", "text") .endObject().startObject("num").field("type", "double").endObject().endObject().endObject().endObject())); - ensureYellow(); // add two docs within offset List<IndexRequestBuilder> indexBuilders = new ArrayList<>(); @@ -267,7 +265,6 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { .endObject() .endObject() .endObject())); - ensureYellow(); List<IndexRequestBuilder> indexBuilders = new ArrayList<>(); indexBuilders.add(client().prepareIndex() @@ -321,7 +318,6 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { "type1", jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("test").field("type", "text") .endObject().startObject("loc").field("type", "geo_point").endObject().endObject().endObject().endObject())); - ensureYellow(); client().prepareIndex() .setType("type1") @@ -367,7 +363,6 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { "type1", jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("test").field("type", "text") .endObject().startObject("num").field("type", "double").endObject().endObject().endObject().endObject())); - ensureYellow(); client().prepareIndex().setType("type1").setId("1").setIndex("test").setRefreshPolicy(IMMEDIATE) .setSource(jsonBuilder().startObject().field("test", "value value").field("num", 1.0).endObject()).get(); @@ -448,7 +443,6 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { "type1",
jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("test").field("type", "text") .endObject().startObject("num1").field("type", "date").endObject().endObject().endObject().endObject())); - ensureYellow(); client().index( indexRequest("test").type("type1").id("1") .source(jsonBuilder().startObject().field("test", "value").field("num1", "2013-05-27").endObject())).actionGet(); @@ -485,7 +479,6 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { .endObject() .endObject() .endObject())); - ensureYellow(); client().index(indexRequest("test").type("type1").id("1") .source(jsonBuilder().startObject().field("test", "value").field("num1", System.currentTimeMillis()).endObject())) .actionGet(); @@ -519,7 +512,6 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { .endObject().endObject().endObject().endObject()) ); - ensureYellow(); client().index( indexRequest("test").type("type1").id("1") @@ -567,7 +559,6 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { "type1", jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("test").field("type", "text") .endObject().startObject("num1").field("type", "date").endObject().endObject().endObject().endObject())); - ensureYellow(); DateTime docDate = dt.minusDays(1); String docDateString = docDate.getYear() + "-" + String.format(Locale.ROOT, "%02d", docDate.getMonthOfYear()) + "-" @@ -625,7 +616,6 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { } xContentBuilder.endObject().endObject().endObject().endObject(); assertAcked(prepareCreate("test").setSettings(settings).addMapping("type", xContentBuilder.string())); - ensureYellow(); int numDocs = 200; List<IndexRequestBuilder> indexBuilders = new ArrayList<>(); @@ -675,7 +665,6 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { "type", jsonBuilder().startObject().startObject("type").startObject("properties").startObject("test").field("type", "text") .endObject().startObject("geo").field("type", "geo_point").endObject().endObject().endObject().endObject())); - ensureYellow(); int numDocs = 2; client().index( indexRequest("test").type("type").source( @@ -704,7 +693,6 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { "type", jsonBuilder().startObject().startObject("type").startObject("properties").startObject("test").field("type", "text") .endObject().startObject("num").field("type", "text").endObject().endObject().endObject().endObject())); - ensureYellow(); client().index( indexRequest("test").type("type").source( jsonBuilder().startObject().field("test", "value").field("num", Integer.toString(1)).endObject())).actionGet(); @@ -726,7 +714,6 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { "type", jsonBuilder().startObject().startObject("type").startObject("properties").startObject("test").field("type", "text") .endObject().startObject("num").field("type", "double").endObject().endObject().endObject().endObject())); - ensureYellow(); client().index( indexRequest("test").type("type").source(jsonBuilder().startObject().field("test", "value").field("num", 1.0).endObject())) .actionGet(); @@ -752,7 +739,6 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { .endObject() .endObject() .endObject())); - ensureYellow(); // Index for testing MIN and MAX IndexRequestBuilder doc1 = client().prepareIndex().setType("type1").setId("1").setIndex("test") diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreBackwardCompatibilityIT.java
b/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreBackwardCompatibilityIT.java index 391db24b932..655aecd8fb5 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreBackwardCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreBackwardCompatibilityIT.java @@ -64,7 +64,6 @@ public class FunctionScoreBackwardCompatibilityIT extends ESBackcompatTestCase { .endObject() .endObject() .endObject())); - ensureYellow(); int numDocs = 10; String[] ids = new String[numDocs]; diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreFieldValueIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreFieldValueIT.java index 6c7bfdc85a2..b850a028bcf 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreFieldValueIT.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreFieldValueIT.java @@ -55,7 +55,6 @@ public class FunctionScoreFieldValueIT extends ESIntegTestCase { .endObject() .endObject() .endObject()).get()); - ensureYellow(); client().prepareIndex("test", "type1", "1").setSource("test", 5, "body", "foo").get(); client().prepareIndex("test", "type1", "2").setSource("test", 17, "body", "foo").get(); diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScorePluginIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScorePluginIT.java index e5be795d8f5..c79f803fefe 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScorePluginIT.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/FunctionScorePluginIT.java @@ -23,7 +23,6 @@ import org.apache.lucene.search.Explanation; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; -import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Priority; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; @@ -32,8 +31,8 @@ import org.elasticsearch.index.query.functionscore.DecayFunctionBuilder; import org.elasticsearch.index.query.functionscore.DecayFunctionParser; import org.elasticsearch.index.query.functionscore.ScoreFunctionParser; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.SearchHits; -import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; @@ -41,7 +40,9 @@ import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; import java.io.IOException; import java.util.Collection; +import java.util.List; +import static java.util.Collections.singletonList; import static org.elasticsearch.client.Requests.indexRequest; import static org.elasticsearch.client.Requests.searchRequest; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; @@ -60,6 +61,11 @@ public class FunctionScorePluginIT extends ESIntegTestCase { return pluginList(CustomDistanceScorePlugin.class); } + @Override + protected Collection<Class<? extends Plugin>> transportClientPlugins() { + return pluginList(CustomDistanceScorePlugin.class); + } + public void testPlugin() throws Exception { client().admin() .indices() @@ -94,16 +100,16 @@ public class FunctionScorePluginIT extends ESIntegTestCase { } - public static class
CustomDistanceScorePlugin extends Plugin { - public void onModule(SearchModule scoreModule) { - scoreModule.registerScoreFunction(CustomDistanceScoreBuilder::new, CustomDistanceScoreBuilder.PARSER, - CustomDistanceScoreBuilder.FUNCTION_NAME_FIELD); + public static class CustomDistanceScorePlugin extends Plugin implements SearchPlugin { + @Override + public List<ScoreFunctionSpec<?>> getScoreFunctions() { + return singletonList(new ScoreFunctionSpec<>(CustomDistanceScoreBuilder.NAME, CustomDistanceScoreBuilder::new, + CustomDistanceScoreBuilder.PARSER)); } } public static class CustomDistanceScoreBuilder extends DecayFunctionBuilder<CustomDistanceScoreBuilder> { public static final String NAME = "linear_mult"; - public static final ParseField FUNCTION_NAME_FIELD = new ParseField(NAME); public static final ScoreFunctionParser<CustomDistanceScoreBuilder> PARSER = new DecayFunctionParser<>( CustomDistanceScoreBuilder::new); diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java index 14d0fc959c3..364186572d9 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java @@ -80,7 +80,6 @@ public class QueryRescorerIT extends ESIntegTestCase { for (int i = 0; i < iters; i ++) { client().prepareIndex("test", "type", Integer.toString(i)).setSource("f", Integer.toString(i)).execute().actionGet(); } - ensureYellow(); refresh(); int numShards = getNumShards("test").numPrimaries; @@ -118,7 +117,6 @@ public class QueryRescorerIT extends ESIntegTestCase { client().prepareIndex("test", "type1", "2").setSource("field1", "the quick lazy huge brown fox jumps over the tree ").get(); client().prepareIndex("test", "type1", "3") .setSource("field1", "quick huge brown", "field2", "the quick lazy huge brown fox jumps over the tree").get(); - ensureYellow(); refresh(); SearchResponse searchResponse = client().prepareSearch() .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR)) @@ -182,7 +180,6 @@ public class QueryRescorerIT extends ESIntegTestCase { client().admin().indices().prepareRefresh("test").execute().actionGet(); client().prepareIndex("test", "type1", "11").setSource("field1", "2st street boston massachusetts").execute().actionGet(); client().prepareIndex("test", "type1", "12").setSource("field1", "3st street boston massachusetts").execute().actionGet(); - ensureYellow(); client().admin().indices().prepareRefresh("test").execute().actionGet(); SearchResponse searchResponse = client() .prepareSearch() @@ -249,7 +246,6 @@ public class QueryRescorerIT extends ESIntegTestCase { client().admin().indices().prepareRefresh("test").execute().actionGet(); client().prepareIndex("test", "type1", "1").setSource("field1", "lexington massachusetts avenue").execute().actionGet(); client().prepareIndex("test", "type1", "2").setSource("field1", "lexington avenue boston massachusetts road").execute().actionGet(); - ensureYellow(); client().admin().indices().prepareRefresh("test").execute().actionGet(); SearchResponse searchResponse = client() @@ -319,7 +315,6 @@ public class QueryRescorerIT extends ESIntegTestCase { client().admin().indices().prepareRefresh("test").execute().actionGet(); client().prepareIndex("test", "type1", "1").setSource("field1", "lexington massachusetts avenue").execute().actionGet(); client().prepareIndex("test", "type1", "2").setSource("field1", "lexington avenue boston massachusetts road").execute().actionGet(); -
ensureYellow(); client().admin().indices().prepareRefresh("test").execute().actionGet(); SearchResponse searchResponse = client() @@ -355,7 +350,7 @@ public class QueryRescorerIT extends ESIntegTestCase { // and shard id are equal during merging shard results. // This comparator uses a custom tie in case the scores are equal, so that both regular hits and rescored hits // are sorted equally. This is fine since tests only care about the fact the scores should be equal, not ordering. - private final static Comparator<SearchHit> searchHitsComparator = new Comparator<SearchHit>() { + private static final Comparator<SearchHit> searchHitsComparator = new Comparator<SearchHit>() { @Override public int compare(SearchHit hit1, SearchHit hit2) { int cmp = Float.compare(hit2.getScore(), hit1.getScore()); @@ -487,7 +482,6 @@ public class QueryRescorerIT extends ESIntegTestCase { client().prepareIndex("test", "type1", "3") .setSource("field1", "quick huge brown", "field2", "the quick lazy huge brown fox jumps over the tree").execute() .actionGet(); - ensureYellow(); refresh(); { diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java b/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java index 0debdb263af..985605c4e65 100644 --- a/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java +++ b/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java @@ -415,13 +415,13 @@ public class GeoFilterIT extends ESIntegTestCase { assertThat(hit.getId(), equalTo(key)); } - SearchResponse world = client().prepareSearch().addField("pin").setQuery( + SearchResponse world = client().prepareSearch().addStoredField("pin").setQuery( geoBoundingBoxQuery("pin").setCorners(90, -179.99999, -90, 179.99999) ).execute().actionGet(); assertHitCount(world, 53); - SearchResponse distance = client().prepareSearch().addField("pin").setQuery( + SearchResponse distance = client().prepareSearch().addStoredField("pin").setQuery( geoDistanceQuery("pin").distance("425km").point(51.11, 9.851) ).execute().actionGet(); diff --git a/core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighterPlugin.java b/core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighterPlugin.java index f6d56b8b39c..0f42fd43f00 100644 --- a/core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighterPlugin.java +++ b/core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighterPlugin.java @@ -20,11 +20,15 @@ package org.elasticsearch.search.highlight; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.search.SearchModule; +import org.elasticsearch.plugins.SearchPlugin; -public class CustomHighlighterPlugin extends Plugin { +import java.util.Map; - public void onModule(SearchModule highlightModule) { - highlightModule.registerHighlighter("test-custom", new CustomHighlighter()); +import static java.util.Collections.singletonMap; + +public class CustomHighlighterPlugin extends Plugin implements SearchPlugin { + @Override + public Map<String, Highlighter> getHighlighters() { + return singletonMap("test-custom", new CustomHighlighter()); } } diff --git a/core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighterSearchIT.java b/core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighterSearchIT.java index 4b5153b2417..6ddaae996ba 100644 --- a/core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighterSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/highlight/CustomHighlighterSearchIT.java @@ -35,7 +35,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHighlight;
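CustomHighlighterPlugin is the third plugin in this diff to migrate from the push-style onModule(SearchModule) hook to the declarative SearchPlugin interface, after the fetch sub-phase and score function plugins above. A combined sketch of the new style; ExampleHighlighter and ExampleFetchSubPhase are hypothetical stand-ins, and the Highlighter/FetchSubPhase imports follow the test files above:

import java.util.List;
import java.util.Map;

import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.SearchPlugin;

import static java.util.Collections.singletonList;
import static java.util.Collections.singletonMap;

public class ExampleSearchPlugin extends Plugin implements SearchPlugin {
    @Override
    public Map<String, Highlighter> getHighlighters() {
        // The map key is the highlighter type name search requests will ask for.
        return singletonMap("example", new ExampleHighlighter());
    }

    @Override
    public List<FetchSubPhase> getFetchSubPhases(FetchPhaseConstructionContext context) {
        return singletonList(new ExampleFetchSubPhase());
    }
}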
import static org.hamcrest.Matchers.equalTo; /** - * + * Integration test for highlighters registered by a plugin. */ @ClusterScope(scope = Scope.SUITE, supportsDedicatedMasters = false, numDataNodes = 1) public class CustomHighlighterSearchIT extends ESIntegTestCase { @@ -52,7 +52,6 @@ public class CustomHighlighterSearchIT extends ESIntegTestCase { "name", "arbitrary content", "other_name", "foo", "other_other_name", "bar"), client().prepareIndex("test", "test", "2").setSource( "other_name", "foo", "other_other_name", "bar")); - ensureYellow(); } public void testThatCustomHighlightersAreSupported() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java b/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java index 9ebbb5b42e0..17a674d669a 100644 --- a/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java @@ -68,6 +68,7 @@ import java.util.TreeSet; import java.util.function.BiConsumer; import java.util.function.Function; +import static java.util.Collections.emptyList; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; @@ -83,7 +84,7 @@ public class HighlightBuilderTests extends ESTestCase { @BeforeClass public static void init() { namedWriteableRegistry = new NamedWriteableRegistry(); - indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry).getQueryParserRegistry(); + indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry, false, emptyList()).getQueryParserRegistry(); } @AfterClass @@ -484,14 +485,14 @@ public class HighlightBuilderTests extends ESTestCase { public void testOrderSerialization() throws Exception { try (BytesStreamOutput out = new BytesStreamOutput()) { Order.NONE.writeTo(out); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(in.readVInt(), equalTo(0)); } } try (BytesStreamOutput out = new BytesStreamOutput()) { Order.SCORE.writeTo(out); - try (StreamInput in = StreamInput.wrap(out.bytes())) { + try (StreamInput in = out.bytes().streamInput()) { assertThat(in.readVInt(), equalTo(1)); } } @@ -738,7 +739,7 @@ public class HighlightBuilderTests extends ESTestCase { private static HighlightBuilder serializedCopy(HighlightBuilder original) throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { original.writeTo(output); - try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { + try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) { return new HighlightBuilder(in); } } diff --git a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java b/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java index 8d7cdef4ca8..934d0286465 100644 --- a/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/highlight/HighlighterSearchIT.java @@ -19,10 +19,12 @@ package org.elasticsearch.search.highlight; import com.carrotsearch.randomizedtesting.generators.RandomPicks; +import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexRequestBuilder; import
org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings.Builder; @@ -64,6 +66,7 @@ import static org.elasticsearch.index.query.QueryBuilders.matchPhrasePrefixQuery import static org.elasticsearch.index.query.QueryBuilders.matchPhraseQuery; import static org.elasticsearch.index.query.QueryBuilders.matchQuery; import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery; +import static org.elasticsearch.index.query.QueryBuilders.nestedQuery; import static org.elasticsearch.index.query.QueryBuilders.prefixQuery; import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery; import static org.elasticsearch.index.query.QueryBuilders.rangeQuery; @@ -112,7 +115,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { mappings.endObject(); assertAcked(prepareCreate("test") .addMapping("type", mappings)); - ensureYellow(); client().prepareIndex("test", "type", "1") .setSource(jsonBuilder().startObject().field("text", "text").endObject()) .get(); @@ -148,7 +150,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { mappings.endObject(); assertAcked(prepareCreate("test") .addMapping("type", mappings)); - ensureYellow(); client().prepareIndex("test", "type", "1") .setSource(jsonBuilder().startObject().field("unstored_text", "text").field("text", "text").endObject()) .get(); @@ -167,7 +168,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testHighTermFrequencyDoc() throws IOException { assertAcked(prepareCreate("test") .addMapping("test", "name", "type=text,term_vector=with_positions_offsets,store=" + randomBoolean())); - ensureYellow(); StringBuilder builder = new StringBuilder(); for (int i = 0; i < 6000; i++) { builder.append("abc").append(" "); @@ -226,7 +226,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { .putArray("analysis.analyzer.autocomplete.filter", "lowercase", "wordDelimiter") .put("analysis.analyzer.search_autocomplete.tokenizer", "whitespace") .putArray("analysis.analyzer.search_autocomplete.filter", "lowercase", "wordDelimiter"))); - ensureYellow(); client().prepareIndex("test", "test", "1") .setSource("name", "ARCOTEL Hotels Deutschland").get(); refresh(); @@ -329,7 +328,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { .addMapping("type1", "no_long_term", "type=text,term_vector=with_positions_offsets", "long_term", "type=text,term_vector=with_positions_offsets")); - ensureYellow(); client().prepareIndex("test", "type1", "1") .setSource("no_long_term", "This is a test where foo is highlighed and should be highlighted", @@ -364,7 +362,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { .startObject("title").field("type", "text").field("store", false).field("term_vector", "no").endObject() .startObject("attachments").startObject("properties").startObject("body").field("type", "text").field("store", false).field("term_vector", "no").endObject().endObject().endObject() .endObject().endObject().endObject())); - ensureYellow(); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < indexRequestBuilders.length; i++) { @@ -403,7 +400,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { .startObject("title").field("type", "text").field("store", false).field("term_vector", 
"with_positions_offsets").endObject() .startObject("attachments").startObject("properties").startObject("body").field("type", "text").field("store", false).field("term_vector", "with_positions_offsets").endObject().endObject().endObject() .endObject().endObject().endObject())); - ensureYellow(); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < indexRequestBuilders.length; i++) { @@ -442,7 +438,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { .startObject("title").field("type", "text").field("store", false).field("index_options", "offsets").endObject() .startObject("attachments").startObject("properties").startObject("body").field("type", "text").field("store", false).field("index_options", "offsets").endObject().endObject().endObject() .endObject().endObject().endObject())); - ensureYellow(); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < indexRequestBuilders.length; i++) { @@ -489,7 +484,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testHighlightIssue1994() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", "title", "type=text,store=false", "titleTV", "type=text,store=false,term_vector=with_positions_offsets")); - ensureYellow(); indexRandom(false, client().prepareIndex("test", "type1", "1") .setSource("title", new String[]{"This is a test on the highlighting bug present in elasticsearch", "The bug is bugging us"}, @@ -1003,7 +997,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testSameContent() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", "title", "type=text,store=true,term_vector=with_positions_offsets")); - ensureYellow(); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < 5; i++) { @@ -1025,7 +1018,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testFastVectorHighlighterOffsetParameter() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", "title", "type=text,store=true,term_vector=with_positions_offsets").get()); - ensureYellow(); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < 5; i++) { @@ -1048,7 +1040,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testEscapeHtml() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", "title", "type=text,store=true")); - ensureYellow(); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < indexRequestBuilders.length; i++) { @@ -1070,7 +1061,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testEscapeHtmlVector() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", "title", "type=text,store=true,term_vector=with_positions_offsets")); - ensureYellow(); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < 5; i++) { @@ -2095,7 +2085,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { public void testPostingsHighlighterEscapeHtml() throws Exception { assertAcked(prepareCreate("test") .addMapping("type1", "title", "type=text," + randomStoreField() + "index_options=offsets")); - ensureYellow(); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[5]; for (int i = 0; i < 5; i++) { @@ -2575,7 +2564,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { mappings.endObject(); 
assertAcked(prepareCreate("test") .addMapping("type", mappings)); - ensureYellow(); client().prepareIndex("test", "type", "1") .setSource(jsonBuilder().startObject().field("text", "Arbitrary text field which will should not cause a failure").endObject()) @@ -2607,7 +2595,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { mappings.endObject(); assertAcked(prepareCreate("test") .addMapping("type", mappings)); - ensureYellow(); client().prepareIndex("test", "type", "1") .setSource(jsonBuilder().startObject().field("keyword_field", "some text").endObject()) @@ -2636,7 +2623,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertAcked(prepareCreate("test") .addMapping("type", mappings) .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_2))); - ensureYellow(); client().prepareIndex("test", "type", "1") .setSource(jsonBuilder().startObject().field("string_field", "some text").endObject()) @@ -2649,4 +2635,43 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertThat(search.getHits().totalHits(), equalTo(1L)); assertThat(search.getHits().getAt(0).getHighlightFields().get("string_field").getFragments()[0].string(), equalTo("some text")); } + + public void testACopyFieldWithNestedQuery() throws Exception { + String mapping = jsonBuilder().startObject().startObject("type").startObject("properties") + .startObject("foo") + .field("type", "nested") + .startObject("properties") + .startObject("text") + .field("type", "text") + .field("copy_to", "foo_text") + .endObject() + .endObject() + .endObject() + .startObject("foo_text") + .field("type", "text") + .field("term_vector", "with_positions_offsets") + .field("store", true) + .endObject() + .endObject().endObject().endObject().string(); + prepareCreate("test").addMapping("type", mapping).get(); + + client().prepareIndex("test", "type", "1").setSource(jsonBuilder().startObject().startArray("foo") + .startObject().field("text", "brown").endObject() + .startObject().field("text", "cow").endObject() + .endArray().endObject()) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .get(); + + SearchResponse searchResponse = client().prepareSearch() + .setQuery(nestedQuery("foo", matchQuery("foo.text", "brown cow"), ScoreMode.None)) + .highlighter(new HighlightBuilder() + .field(new Field("foo_text").highlighterType("fvh")) + .requireFieldMatch(false)) + .get(); + assertHitCount(searchResponse, 1); + HighlightField field = searchResponse.getHits().getAt(0).highlightFields().get("foo_text"); + assertThat(field.getFragments().length, equalTo(2)); + assertThat(field.getFragments()[0].string(), equalTo("brown")); + assertThat(field.getFragments()[1].string(), equalTo("cow")); + } } diff --git a/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java b/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java index 01f98564814..780e8cbcad4 100644 --- a/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java +++ b/core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java @@ -31,6 +31,7 @@ import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.InnerHitBuilder; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.MockScriptEngine; +import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchHit; @@ -47,6 +48,8 @@ import java.util.Collection; import 
java.util.Collections; import java.util.List; import java.util.Locale; +import java.util.Map; +import java.util.function.Function; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; @@ -71,9 +74,17 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; public class InnerHitsIT extends ESIntegTestCase { + @Override protected Collection<Class<? extends Plugin>> nodePlugins() { - return pluginList(MockScriptEngine.TestPlugin.class); + return Collections.singleton(CustomScriptPlugin.class); + } + + public static class CustomScriptPlugin extends MockScriptPlugin { + @Override + protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() { + return Collections.emptyMap(); + } } public void testSimpleNested() throws Exception { @@ -156,7 +167,7 @@ public class InnerHitsIT extends ESIntegTestCase { .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.Avg).innerHit( new InnerHitBuilder().setHighlightBuilder(new HighlightBuilder().field("comments.message")) .setExplain(true) - .addFieldDataField("comments.message") + .addDocValueField("comments.message") .addScriptField("script", new Script("5", ScriptService.ScriptType.INLINE, MockScriptEngine.NAME, Collections.emptyMap())) .setSize(1) )).get(); @@ -287,7 +298,7 @@ public class InnerHitsIT extends ESIntegTestCase { .setQuery( hasChildQuery("comment", matchQuery("message", "fox"), ScoreMode.None).innerHit( new InnerHitBuilder() - .addFieldDataField("message") + .addDocValueField("message") .setHighlightBuilder(new HighlightBuilder().field("message")) .setExplain(true).setSize(1) .addScriptField("script", new Script("5", ScriptService.ScriptType.INLINE, diff --git a/core/src/test/java/org/elasticsearch/search/internal/InternalSearchHitTests.java b/core/src/test/java/org/elasticsearch/search/internal/InternalSearchHitTests.java index 77fc2f0e6a9..dedd47d3e43 100644 --- a/core/src/test/java/org/elasticsearch/search/internal/InternalSearchHitTests.java +++ b/core/src/test/java/org/elasticsearch/search/internal/InternalSearchHitTests.java @@ -67,7 +67,7 @@ public class InternalSearchHitTests extends ESTestCase { context.streamShardTarget(InternalSearchHits.StreamContext.ShardTargetType.STREAM); BytesStreamOutput output = new BytesStreamOutput(); hits.writeTo(output, context); - InputStream input = new ByteArrayInputStream(output.bytes().toBytes()); + InputStream input = output.bytes().streamInput(); context = new InternalSearchHits.StreamContext(); context.streamShardTarget(InternalSearchHits.StreamContext.ShardTargetType.STREAM); InternalSearchHits results = InternalSearchHits.readSearchHits(new InputStreamStreamInput(input), context); diff --git a/core/src/test/java/org/elasticsearch/search/matchedqueries/MatchedQueriesIT.java b/core/src/test/java/org/elasticsearch/search/matchedqueries/MatchedQueriesIT.java index ef7a4ecc7ce..300c4f141b0 100644 --- a/core/src/test/java/org/elasticsearch/search/matchedqueries/MatchedQueriesIT.java +++ b/core/src/test/java/org/elasticsearch/search/matchedqueries/MatchedQueriesIT.java @@ -362,8 +362,8 @@ public class MatchedQueriesIT extends ESIntegTestCase { refresh(); QueryBuilder[] queries = new QueryBuilder[]{ - wrapperQuery(matchQuery("content",
"amet").queryName("abc").buildAsBytes().utf8ToString()), + constantScoreQuery(wrapperQuery(termQuery("content", "amet").queryName("abc").buildAsBytes().utf8ToString())) }; for (QueryBuilder query : queries) { SearchResponse searchResponse = client().prepareSearch() diff --git a/core/src/test/java/org/elasticsearch/search/preference/SearchPreferenceIT.java b/core/src/test/java/org/elasticsearch/search/preference/SearchPreferenceIT.java index 49046bae009..3758715cbac 100644 --- a/core/src/test/java/org/elasticsearch/search/preference/SearchPreferenceIT.java +++ b/core/src/test/java/org/elasticsearch/search/preference/SearchPreferenceIT.java @@ -98,7 +98,7 @@ public class SearchPreferenceIT extends ESIntegTestCase { } public void testSimplePreference() throws Exception { - client().admin().indices().prepareCreate("test").setSettings("number_of_replicas=1").get(); + client().admin().indices().prepareCreate("test").setSettings("{\"number_of_replicas\": 1}").get(); ensureGreen(); client().prepareIndex("test", "type1").setSource("field1", "value1").execute().actionGet(); @@ -131,7 +131,7 @@ public class SearchPreferenceIT extends ESIntegTestCase { } public void testReplicaPreference() throws Exception { - client().admin().indices().prepareCreate("test").setSettings("number_of_replicas=0").get(); + client().admin().indices().prepareCreate("test").setSettings("{\"number_of_replicas\": 0}").get(); ensureGreen(); client().prepareIndex("test", "type1").setSource("field1", "value1").execute().actionGet(); @@ -147,7 +147,7 @@ public class SearchPreferenceIT extends ESIntegTestCase { SearchResponse resp = client().prepareSearch().setQuery(matchAllQuery()).setPreference("_replica_first").execute().actionGet(); assertThat(resp.getHits().totalHits(), equalTo(1L)); - client().admin().indices().prepareUpdateSettings("test").setSettings("number_of_replicas=1").get(); + client().admin().indices().prepareUpdateSettings("test").setSettings("{\"number_of_replicas\": 1}").get(); ensureGreen("test"); resp = client().prepareSearch().setQuery(matchAllQuery()).setPreference("_replica").execute().actionGet(); diff --git a/core/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java b/core/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java index ffad39bc3f2..152709d628d 100644 --- a/core/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java +++ b/core/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java @@ -38,8 +38,6 @@ import org.apache.lucene.util.TestUtil; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.search.profile.ProfileResult; -import org.elasticsearch.search.profile.query.QueryProfiler; -import org.elasticsearch.search.profile.query.QueryTimingType; import org.elasticsearch.test.ESTestCase; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -100,6 +98,13 @@ public class QueryProfilerTests extends ESTestCase { assertThat(breakdown.get(QueryTimingType.SCORE.toString()).longValue(), greaterThan(0L)); assertThat(breakdown.get(QueryTimingType.MATCH.toString()).longValue(), equalTo(0L)); + assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT.toString() + "_count").longValue(), greaterThan(0L)); + assertThat(breakdown.get(QueryTimingType.BUILD_SCORER.toString() + "_count").longValue(), greaterThan(0L)); + assertThat(breakdown.get(QueryTimingType.NEXT_DOC.toString() + "_count").longValue(), greaterThan(0L)); + 
assertThat(breakdown.get(QueryTimingType.ADVANCE.toString() + "_count").longValue(), equalTo(0L)); + assertThat(breakdown.get(QueryTimingType.SCORE.toString() + "_count").longValue(), greaterThan(0L)); + assertThat(breakdown.get(QueryTimingType.MATCH.toString() + "_count").longValue(), equalTo(0L)); + long rewriteTime = profiler.getRewriteTime(); assertThat(rewriteTime, greaterThan(0L)); } @@ -119,6 +124,13 @@ public class QueryProfilerTests extends ESTestCase { assertThat(breakdown.get(QueryTimingType.SCORE.toString()).longValue(), equalTo(0L)); assertThat(breakdown.get(QueryTimingType.MATCH.toString()).longValue(), equalTo(0L)); + assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT.toString() + "_count").longValue(), greaterThan(0L)); + assertThat(breakdown.get(QueryTimingType.BUILD_SCORER.toString() + "_count").longValue(), greaterThan(0L)); + assertThat(breakdown.get(QueryTimingType.NEXT_DOC.toString() + "_count").longValue(), greaterThan(0L)); + assertThat(breakdown.get(QueryTimingType.ADVANCE.toString() + "_count").longValue(), equalTo(0L)); + assertThat(breakdown.get(QueryTimingType.SCORE.toString() + "_count").longValue(), equalTo(0L)); + assertThat(breakdown.get(QueryTimingType.MATCH.toString() + "_count").longValue(), equalTo(0L)); + long rewriteTime = profiler.getRewriteTime(); assertThat(rewriteTime, greaterThan(0L)); } @@ -154,6 +166,13 @@ public class QueryProfilerTests extends ESTestCase { assertThat(breakdown.get(QueryTimingType.SCORE.toString()).longValue(), equalTo(0L)); assertThat(breakdown.get(QueryTimingType.MATCH.toString()).longValue(), greaterThan(0L)); + assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT.toString() + "_count").longValue(), greaterThan(0L)); + assertThat(breakdown.get(QueryTimingType.BUILD_SCORER.toString() + "_count").longValue(), greaterThan(0L)); + assertThat(breakdown.get(QueryTimingType.NEXT_DOC.toString() + "_count").longValue(), greaterThan(0L)); + assertThat(breakdown.get(QueryTimingType.ADVANCE.toString() + "_count").longValue(), equalTo(0L)); + assertThat(breakdown.get(QueryTimingType.SCORE.toString() + "_count").longValue(), equalTo(0L)); + assertThat(breakdown.get(QueryTimingType.MATCH.toString() + "_count").longValue(), greaterThan(0L)); + long rewriteTime = profiler.getRewriteTime(); assertThat(rewriteTime, greaterThan(0L)); diff --git a/core/src/test/java/org/elasticsearch/search/query/ExistsIT.java b/core/src/test/java/org/elasticsearch/search/query/ExistsIT.java index 54515c6a2b0..8024112289e 100644 --- a/core/src/test/java/org/elasticsearch/search/query/ExistsIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/ExistsIT.java @@ -47,7 +47,6 @@ public class ExistsIT extends ESIntegTestCase { // TODO: move this to a unit test somewhere... 
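// Editorial note, not part of the original diff: the "_count" assertions added to
// QueryProfilerTests above reflect that the profiler breakdown now carries two entries per
// QueryTimingType -- the accumulated time under the plain name and the number of invocations
// under "<name>_count". A minimal sketch of consuming both, assuming a Map<String, Long>
// breakdown like the one those assertions read from a ProfileResult:
//
//     for (QueryTimingType type : QueryTimingType.values()) {
//         long nanos = breakdown.get(type.toString());
//         long calls = breakdown.get(type.toString() + "_count");
//         logger.info("{} took {} ns over {} calls", type, nanos, calls);
//     }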
public void testEmptyIndex() throws Exception { createIndex("test"); - ensureYellow("test"); SearchResponse resp = client().prepareSearch("test").setQuery(QueryBuilders.existsQuery("foo")).execute().actionGet(); assertSearchResponse(resp); resp = client().prepareSearch("test").setQuery(QueryBuilders.boolQuery().mustNot(QueryBuilders.existsQuery("foo"))).execute().actionGet(); @@ -118,7 +117,6 @@ public class ExistsIT extends ESIntegTestCase { expected.put("bar.bar.bar", 1); expected.put("foobar", 0); - ensureYellow("idx"); final long numDocs = sources.length; SearchResponse allDocs = client().prepareSearch("idx").setSize(sources.length).get(); assertSearchResponse(allDocs); diff --git a/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java index 875256a0f92..766aff8d274 100644 --- a/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java @@ -623,7 +623,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase { assertFirstHit(searchResponse, hasId("ultimate1")); } - private static final void assertEquivalent(String query, SearchResponse left, SearchResponse right) { + private static void assertEquivalent(String query, SearchResponse left, SearchResponse right) { assertNoFailures(left); assertNoFailures(right); SearchHits leftHits = left.getHits(); diff --git a/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java b/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java index f965d3ac5fd..ed38fe8405a 100644 --- a/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java @@ -55,6 +55,7 @@ import org.junit.BeforeClass; import java.io.IOException; +import static java.util.Collections.emptyList; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; @@ -70,7 +71,7 @@ public class QueryRescoreBuilderTests extends ESTestCase { @BeforeClass public static void init() { namedWriteableRegistry = new NamedWriteableRegistry(); - indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry).getQueryParserRegistry(); + indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry, false, emptyList()).getQueryParserRegistry(); } @AfterClass @@ -340,7 +341,7 @@ public class QueryRescoreBuilderTests extends ESTestCase { private static RescoreBuilder serializedCopy(RescoreBuilder original) throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { output.writeNamedWriteable(original); - try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { + try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) { return in.readNamedWriteable(RescoreBuilder.class); } } diff --git a/core/src/test/java/org/elasticsearch/search/scroll/SearchScrollIT.java b/core/src/test/java/org/elasticsearch/search/scroll/SearchScrollIT.java index 3bce6e10f83..275bc1c0c81 100644 --- a/core/src/test/java/org/elasticsearch/search/scroll/SearchScrollIT.java +++ b/core/src/test/java/org/elasticsearch/search/scroll/SearchScrollIT.java @@ -35,6 +35,7 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import 
org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.search.RestClearScrollAction; @@ -68,7 +69,7 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.startsWith; /** - * + * Tests for scrolling. */ public class SearchScrollIT extends ESIntegTestCase { public void testSimpleScrollQueryThenFetch() throws Exception { @@ -419,11 +420,20 @@ assertThrows(internalCluster().transportClient().prepareSearchScroll(searchResponse2.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)), RestStatus.NOT_FOUND); } + /** + * Tests that we use an optimization shrinking the batch to the size of the shard. Thus the Integer.MAX_VALUE window doesn't OOM us. + */ public void testDeepScrollingDoesNotBlowUp() throws Exception { client().prepareIndex("index", "type", "1") .setSource("field", "value") .setRefreshPolicy(IMMEDIATE) .execute().get(); + /* + * Disable the max result window setting for this test because it would reject the search's unreasonably large batch size + * up front; we want that batch size to reach the shards so that only the shrink-to-shard-size optimization protects us. + */ + client().admin().indices().prepareUpdateSettings("index") + .setSettings(Settings.builder().put(IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey(), Integer.MAX_VALUE)).get(); for (SearchType searchType : SearchType.values()) { SearchRequestBuilder builder = client().prepareSearch("index") diff --git a/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java b/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java index 3c675926328..89881a9000f 100644 --- a/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java @@ -56,7 +56,7 @@ public class SearchAfterBuilderTests extends ESTestCase { namedWriteableRegistry = new NamedWriteableRegistry(); indicesQueriesRegistry = new IndicesQueriesRegistry(); QueryParser<MatchAllQueryBuilder> parser = MatchAllQueryBuilder::fromXContent; - indicesQueriesRegistry.register(parser, MatchAllQueryBuilder.QUERY_NAME_FIELD); + indicesQueriesRegistry.register(parser, MatchAllQueryBuilder.NAME); } @AfterClass @@ -65,7 +65,7 @@ indicesQueriesRegistry = null; } - private final SearchAfterBuilder randomSearchFromBuilder() throws IOException { + private SearchAfterBuilder randomSearchFromBuilder() throws IOException { int numSearchFrom = randomIntBetween(1, 10); SearchAfterBuilder searchAfterBuilder = new SearchAfterBuilder(); Object[] values = new Object[numSearchFrom]; @@ -112,7 +112,7 @@ // ensure that every number type remains the same before/after xcontent (de)serialization. // This is not a problem because the final type of each field value is extracted from associated sort field. // This little trick ensures that equals and hashcode are the same when using the xcontent serialization.
- private final SearchAfterBuilder randomJsonSearchFromBuilder() throws IOException { + private SearchAfterBuilder randomJsonSearchFromBuilder() throws IOException { int numSearchAfter = randomIntBetween(1, 10); XContentBuilder jsonBuilder = XContentFactory.jsonBuilder(); jsonBuilder.startObject(); @@ -164,7 +164,7 @@ private static SearchAfterBuilder serializedCopy(SearchAfterBuilder original) throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { original.writeTo(output); - try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { + try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) { return new SearchAfterBuilder(in); } } diff --git a/core/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java b/core/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java index 217f97ace0a..fd83fe3add1 100644 --- a/core/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java @@ -79,7 +79,7 @@ public class SliceBuilderTests extends ESTestCase { namedWriteableRegistry = new NamedWriteableRegistry(); indicesQueriesRegistry = new IndicesQueriesRegistry(); QueryParser<MatchAllQueryBuilder> parser = MatchAllQueryBuilder::fromXContent; - indicesQueriesRegistry.register(parser, MatchAllQueryBuilder.QUERY_NAME_FIELD); + indicesQueriesRegistry.register(parser, MatchAllQueryBuilder.NAME); } @AfterClass @@ -88,7 +88,7 @@ indicesQueriesRegistry = null; } - private final SliceBuilder randomSliceBuilder() throws IOException { + private SliceBuilder randomSliceBuilder() throws IOException { int max = randomIntBetween(2, MAX_SLICE); int id = randomInt(max - 1); String field = randomAsciiOfLengthBetween(5, 20); @@ -99,7 +99,7 @@ try (BytesStreamOutput output = new BytesStreamOutput()) { original.writeTo(output); try (StreamInput in = - new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { + new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) { return new SliceBuilder(in); } } diff --git a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java index bdd5c76534c..9673ed8ef00 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java @@ -75,6 +75,7 @@ import java.nio.file.Path; import java.util.Collections; import java.util.Map; +import static java.util.Collections.emptyList; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; @@ -106,7 +107,7 @@ public abstract class AbstractSortTestCase<T extends SortBuilder<T>> extends EST }; namedWriteableRegistry = new NamedWriteableRegistry(); - indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry).getQueryParserRegistry(); + indicesQueriesRegistry = new SearchModule(Settings.EMPTY, namedWriteableRegistry, false, emptyList()).getQueryParserRegistry(); } @AfterClass @@ -272,7 +273,7 @@ public abstract class AbstractSortTestCase<T extends SortBuilder<T>> extends EST private T copyItem(T original) throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) {
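// Editorial note, not part of the original diff: copyItem round-trips a sort builder through
// the transport serialization layer, the same pattern this change set touches in every
// *Tests file here: write to a BytesStreamOutput, then read back via the more direct
// output.bytes().streamInput() instead of StreamInput.wrap(output.bytes()). The
// NamedWriteableAwareStreamInput wrapper stays in place, since writeables nested inside the
// builder are looked up by name in the NamedWriteableRegistry during deserialization.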
original.writeTo(output); - try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { + try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) { return (T) namedWriteableRegistry.getReader(SortBuilder.class, original.getWriteableName()).read(in); } } diff --git a/core/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java b/core/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java index 99183b11c58..a834f1555d5 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java +++ b/core/src/test/java/org/elasticsearch/search/sort/FieldSortIT.java @@ -94,7 +94,6 @@ public class FieldSortIT extends ESIntegTestCase { client().prepareIndex("test_" + i, "foo", "" + i).setSource("{\"entry\": " + i + "}").get(); } } - ensureYellow(); refresh(); // sort DESC SearchResponse searchResponse = client().prepareSearch() @@ -146,7 +145,6 @@ public class FieldSortIT extends ESIntegTestCase { } int docs = builders.size(); indexRandom(true, builders); - ensureYellow(); SearchResponse allDocsResponse = client().prepareSearch().setQuery( QueryBuilders.boolQuery().must(QueryBuilders.termQuery("foo", "bar")).must( QueryBuilders.rangeQuery("timeUpdated").gte("2014/0" + randomIntBetween(1, 7) + "/01"))) @@ -858,7 +856,6 @@ public class FieldSortIT extends ESIntegTestCase { public void testIgnoreUnmapped() throws Exception { createIndex("test"); - ensureYellow(); client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject() .field("id", "1") @@ -889,7 +886,7 @@ public class FieldSortIT extends ESIntegTestCase { public void testSortMVField() throws Exception { assertAcked(prepareCreate("test") -.addMapping("type1", + .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("long_values") .field("type", "long").endObject().startObject("int_values").field("type", "integer").endObject() .startObject("short_values").field("type", "short").endObject().startObject("byte_values") diff --git a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceIT.java b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceIT.java index 2d8944c7963..ee4cfbebfaf 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceIT.java +++ b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceIT.java @@ -576,13 +576,9 @@ public class GeoDistanceIT extends ESIntegTestCase { } public void testDuelOptimizations() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_2_1_2); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - if (version.before(Version.V_2_2_0)) { - assertAcked(prepareCreate("index").setSettings(settings).addMapping("type", "location", "type=geo_point,lat_lon=true")); - } else { - assertAcked(prepareCreate("index").setSettings(settings).addMapping("type", "location", "type=geo_point")); - } + assertAcked(prepareCreate("index").setSettings(settings).addMapping("type", "location", "type=geo_point,lat_lon=true")); final int numDocs = scaledRandomIntBetween(3000, 10000); List<IndexRequestBuilder> docs = new ArrayList<>(); for (int i = 0; i < numDocs; ++i) { @@ -601,19 +597,12 @@ GeoDistanceQueryBuilder qb =
QueryBuilders.geoDistanceQuery("location").point(originLat, originLon).distance(distance) .geoDistance(geoDistance); long matches; - if (version.before(Version.V_2_2_0)) { - for (String optimizeBbox : Arrays.asList("none", "memory", "indexed")) { - qb.optimizeBbox(optimizeBbox); - SearchResponse resp = client().prepareSearch("index").setSize(0).setQuery(QueryBuilders.constantScoreQuery(qb)) - .execute().actionGet(); - matches = assertDuelOptimization(resp); - logger.info("{} -> {} hits", optimizeBbox, matches); - } - } else { + for (String optimizeBbox : Arrays.asList("none", "memory", "indexed")) { + qb.optimizeBbox(optimizeBbox); SearchResponse resp = client().prepareSearch("index").setSize(0).setQuery(QueryBuilders.constantScoreQuery(qb)) - .execute().actionGet(); + .execute().actionGet(); matches = assertDuelOptimization(resp); - logger.info("{} hits", matches); + logger.info("{} -> {} hits", optimizeBbox, matches); } } } diff --git a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java index 49bd306c1d8..ae3467d16cd 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java +++ b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java @@ -86,7 +86,6 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase { indexRandom(true, client().prepareIndex("index", "type", "d1").setSource(d1Builder), client().prepareIndex("index", "type", "d2").setSource(d2Builder)); - ensureYellow(); GeoPoint[] q = new GeoPoint[2]; if (randomBoolean()) { q[0] = new GeoPoint(2, 1); @@ -152,7 +151,6 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase { indexRandom(true, client().prepareIndex("index", "type", "d1").setSource(d1Builder), client().prepareIndex("index", "type", "d2").setSource(d2Builder)); - ensureYellow(); GeoPoint q = new GeoPoint(0,0); SearchResponse searchResponse = client().prepareSearch() @@ -211,7 +209,6 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase { indexRandom(true, client().prepareIndex("index", "type", "d1").setSource(d1Builder), client().prepareIndex("index", "type", "d2").setSource(d2Builder)); - ensureYellow(); List qHashes = new ArrayList<>(); List qPoints = new ArrayList<>(); @@ -260,7 +257,6 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase { indexRandom(true, client().prepareIndex("index", "type", "d1").setSource(jsonBuilder().startObject().startObject(LOCATION_FIELD).field("lat", 1).field("lon", 1).endObject().endObject()), client().prepareIndex("index", "type", "d2").setSource(jsonBuilder().startObject().startObject(LOCATION_FIELD).field("lat", 1).field("lon", 2).endObject().endObject())); - ensureYellow(); String hashPoint = "s037ms06g7h0"; @@ -342,8 +338,6 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase { client().prepareIndex("test1", "type").setSource("str_field", "bcd", "long_field", 3, "double_field", 0.65), client().prepareIndex("test2", "type").setSource()); - ensureYellow("test1", "test2"); - SearchResponse resp = client().prepareSearch("test1", "test2") .addSort(fieldSort("str_field").order(SortOrder.ASC).unmappedType("keyword")) .addSort(fieldSort("str_field2").order(SortOrder.DESC).unmappedType("keyword")).get(); diff --git a/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java index cc615a22f17..6a4cc61e041 100644 --- 
a/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java @@ -192,16 +192,13 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase { - public static CustomSuggester INSTANCE = new CustomSuggester(); + public static final CustomSuggester INSTANCE = new CustomSuggester(); // This is a pretty dumb implementation which returns the original text + fieldName + custom config option + 12 or 123 @Override diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterPlugin.java b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterPlugin.java index dc78aec69cf..583ac365fad 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterPlugin.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterPlugin.java @@ -19,12 +19,15 @@ package org.elasticsearch.search.suggest; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.search.SearchModule; +import org.elasticsearch.plugins.SearchPlugin; -public class CustomSuggesterPlugin extends Plugin { +import java.util.Map; - public void onModule(SearchModule searchModule) { - searchModule.registerSuggester("custom", CustomSuggester.INSTANCE); +import static java.util.Collections.singletonMap; + +public class CustomSuggesterPlugin extends Plugin implements SearchPlugin { + @Override + public Map<String, Suggester<?>> getSuggesters() { + return singletonMap("custom", CustomSuggester.INSTANCE); } - } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java index 28f8ec49a57..408f46de061 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/CustomSuggesterSearchIT.java @@ -51,7 +51,7 @@ import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; /** - * + * Integration test for registering a custom suggester.
*/ @ClusterScope(scope = Scope.SUITE, numDataNodes = 1) public class CustomSuggesterSearchIT extends ESIntegTestCase { @@ -60,6 +60,11 @@ public class CustomSuggesterSearchIT extends ESIntegTestCase { return pluginList(CustomSuggesterPlugin.class); } + @Override + protected Collection<Class<? extends Plugin>> transportClientPlugins() { + return pluginList(CustomSuggesterPlugin.class); + } + public void testThatCustomSuggestersCanBeRegisteredAndWork() throws Exception { createIndex("test"); client().prepareIndex("test", "test", "1").setSource(jsonBuilder() @@ -67,7 +72,6 @@ public class CustomSuggesterSearchIT extends ESIntegTestCase { .field("name", "arbitrary content") .endObject()) .setRefreshPolicy(IMMEDIATE).get(); - ensureYellow(); String randomText = randomAsciiOfLength(10); String randomField = randomAsciiOfLength(10); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java index 77dc2e01b56..cb375db8c31 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.search.suggest; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -30,6 +31,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.indices.query.IndicesQueriesRegistry; +import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.suggest.completion.CompletionSuggesterBuilderTests; import org.elasticsearch.search.suggest.completion.WritableTestCase; import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilderTests; @@ -40,6 +42,8 @@ import org.junit.BeforeClass; import java.io.IOException; import java.util.Map.Entry; +import static java.util.Collections.emptyList; + public class SuggestBuilderTests extends WritableTestCase<SuggestBuilder> { private static NamedWriteableRegistry namedWriteableRegistry; @@ -51,7 +55,7 @@ @BeforeClass public static void init() { namedWriteableRegistry = new NamedWriteableRegistry(); - suggesters = new Suggesters(namedWriteableRegistry); + suggesters = new SearchModule(Settings.EMPTY, namedWriteableRegistry, false, emptyList()).getSuggesters(); } @AfterClass diff --git a/core/src/test/java/org/elasticsearch/search/suggest/completion/WritableTestCase.java b/core/src/test/java/org/elasticsearch/search/suggest/completion/WritableTestCase.java index 818abf64ffc..842a2395653 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/completion/WritableTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/completion/WritableTestCase.java @@ -103,7 +103,7 @@ public abstract class WritableTestCase<M extends Writeable> extends ESTestCase { private M copyModel(M original) throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { original.writeTo(output); - try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), provideNamedWritableRegistry())) { + try
(StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), provideNamedWritableRegistry())) { return readFrom(in); } } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java index 3fd3850b98a..846d3193f6d 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java @@ -206,7 +206,7 @@ public class DirectCandidateGeneratorTests extends ESTestCase{ private static DirectCandidateGeneratorBuilder serializedCopy(DirectCandidateGeneratorBuilder original) throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { original.writeTo(output); - try (StreamInput in = StreamInput.wrap(output.bytes())) { + try (StreamInput in = output.bytes().streamInput()) { return new DirectCandidateGeneratorBuilder(in); } } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java index 8834eed7f97..4c5b3b8ca60 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/PhraseSuggestionBuilderTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.search.suggest.phrase; -import org.elasticsearch.script.Template; +import org.elasticsearch.script.Script; import org.elasticsearch.search.suggest.AbstractSuggestionBuilderTestCase; import java.io.IOException; @@ -109,7 +109,7 @@ public class PhraseSuggestionBuilderTests extends AbstractSuggestionBuilderTestC builder.separator(randomValueOtherThan(builder.separator(), () -> randomAsciiOfLengthBetween(1, 10))); break; case 6: - Template collateQuery = builder.collateQuery(); + Script collateQuery = builder.collateQuery(); if (collateQuery != null) { builder.collateQuery(randomValueOtherThan(collateQuery.getScript(), () -> randomAsciiOfLengthBetween(3, 20))); } else { @@ -156,7 +156,7 @@ public class PhraseSuggestionBuilderTests extends AbstractSuggestionBuilderTestC assertEquals("suggestion field name is empty", e.getMessage()); PhraseSuggestionBuilder builder = new PhraseSuggestionBuilder(randomAsciiOfLengthBetween(2, 20)); - + e = expectThrows(IllegalArgumentException.class, () -> builder.gramSize(0)); assertEquals("gramSize must be >= 1", e.getMessage()); e = expectThrows(IllegalArgumentException.class, () -> builder.gramSize(-1)); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java index f167eefa43d..ff06a74756f 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java @@ -44,7 +44,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.indices.query.IndicesQueriesRegistry; -import org.elasticsearch.search.suggest.Suggesters; +import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.ESTestCase; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -67,7 
+67,7 @@ public abstract class SmoothingModelTestCase extends ESTestCase { public static void init() { if (namedWriteableRegistry == null) { namedWriteableRegistry = new NamedWriteableRegistry(); - new Suggesters(namedWriteableRegistry); + SearchModule.registerSmoothingModels(namedWriteableRegistry); } } @@ -180,7 +180,7 @@ public abstract class SmoothingModelTestCase extends ESTestCase { static SmoothingModel copyModel(SmoothingModel original) throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { original.writeTo(output); - try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) { + try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), namedWriteableRegistry)) { return namedWriteableRegistry.getReader(SmoothingModel.class, original.getWriteableName()).read(in); } } diff --git a/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java b/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java index 50fb3f9074b..bbd1bec1d45 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java +++ b/core/src/test/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java @@ -279,8 +279,8 @@ public abstract class AbstractSnapshotIntegTestCase extends ESIntegTestCase { } @Override - public void onFailure(String source, Throwable t) { - logger.warn("failed to execute [{}]", t, source); + public void onFailure(String source, Exception e) { + logger.warn("failed to execute [{}]", e, source); } }); diff --git a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java index 3ba6c875b68..96f30a1a0a6 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java @@ -29,8 +29,11 @@ import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotStatus; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.Client; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.metadata.MetaData; @@ -44,6 +47,7 @@ import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.discovery.zen.elect.ElectMasterService; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.store.IndexStore; import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.indices.ttl.IndicesTTLService; @@ -166,7 +170,6 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest internalCluster().startNode(); Client client = client(); createIndex("test-idx"); - ensureYellow(); logger.info("--> add custom persistent metadata"); updateClusterState(new 
ClusterStateUpdater() { @Override @@ -271,7 +274,7 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest } @Override - public void onFailure(String source, @Nullable Throwable t) { + public void onFailure(String source, @Nullable Exception e) { countDownLatch.countDown(); } @@ -283,8 +286,8 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest countDownLatch.await(); } - private static interface ClusterStateUpdater { - public ClusterState execute(ClusterState currentState) throws Exception; + private interface ClusterStateUpdater { + ClusterState execute(ClusterState currentState) throws Exception; } public void testSnapshotDuringNodeShutdown() throws Exception { @@ -391,8 +394,11 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest logger.info("--> making sure that snapshot no longer exists"); assertThrows(client().admin().cluster().prepareGetSnapshots("test-repo").setSnapshots("test-snap").execute(), SnapshotMissingException.class); - // Subtract index file from the count - assertThat("not all files were deleted during snapshot cancellation", numberOfFilesBeforeSnapshot, equalTo(numberOfFiles(repo) - 1)); + // Subtract three files that will remain in the repository: + // (1) index-1 + // (2) index-0 (because we keep the previous version) and + // (3) index-latest + assertThat("not all files were deleted during snapshot cancellation", numberOfFilesBeforeSnapshot, equalTo(numberOfFiles(repo) - 3)); logger.info("--> done"); } @@ -439,9 +445,9 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest logger.info("--> create an index that will have no allocated shards"); assertAcked(prepareCreate("test-idx-none", 1, Settings.builder().put("number_of_shards", 6) .put("index.routing.allocation.include.tag", "nowhere") - .put("number_of_replicas", 0))); + .put("number_of_replicas", 0)).setWaitForActiveShards(ActiveShardCount.NONE).get()); + assertTrue(client().admin().indices().prepareExists("test-idx-none").get().isExists()); - logger.info("--> create repository"); logger.info("--> creating repository"); PutRepositoryResponse putRepositoryResponse = client().admin().cluster().preparePutRepository("test-repo") .setType("fs").setSettings(Settings.builder().put("location", randomRepoPath())).execute().actionGet(); @@ -635,6 +641,7 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest .put(MockRepository.Plugin.PASSWORD_SETTING.getKey(), "verysecretpassword") ).get(); + NodeClient nodeClient = internalCluster().getInstance(NodeClient.class); RestGetRepositoriesAction getRepoAction = internalCluster().getInstance(RestGetRepositoriesAction.class); RestRequest getRepoRequest = new FakeRestRequest(); getRepoRequest.params().put("repository", "test-repo"); @@ -644,14 +651,14 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest @Override public void sendResponse(RestResponse response) { try { - assertThat(response.content().toUtf8(), containsString("notsecretusername")); - assertThat(response.content().toUtf8(), not(containsString("verysecretpassword"))); + assertThat(response.content().utf8ToString(), containsString("notsecretusername")); + assertThat(response.content().utf8ToString(), not(containsString("verysecretpassword"))); } catch (AssertionError ex) { getRepoError.set(ex); } getRepoLatch.countDown(); } - }); + }, nodeClient); assertTrue(getRepoLatch.await(1, TimeUnit.SECONDS)); if (getRepoError.get() != null) { throw 
getRepoError.get(); @@ -665,14 +672,14 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest @Override public void sendResponse(RestResponse response) { try { - assertThat(response.content().toUtf8(), containsString("notsecretusername")); - assertThat(response.content().toUtf8(), not(containsString("verysecretpassword"))); + assertThat(response.content().utf8ToString(), containsString("notsecretusername")); + assertThat(response.content().utf8ToString(), not(containsString("verysecretpassword"))); } catch (AssertionError ex) { clusterStateError.set(ex); } clusterStateLatch.countDown(); } - }); + }, nodeClient); assertTrue(clusterStateLatch.await(1, TimeUnit.SECONDS)); if (clusterStateError.get() != null) { throw clusterStateError.get(); @@ -880,8 +887,6 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest assertAcked(prepareCreate(name, 0, Settings.builder().put("number_of_shards", between(1, 6)) .put("number_of_replicas", between(1, 6)))); - ensureYellow(name); - logger.info("--> indexing some data into {}", name); for (int i = 0; i < between(10, 500); i++) { index(name, "doc", Integer.toString(i), "foo", "bar" + i); diff --git a/core/src/test/java/org/elasticsearch/snapshots/RepositoriesIT.java b/core/src/test/java/org/elasticsearch/snapshots/RepositoriesIT.java index 79d1497912a..639b60d6d09 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/RepositoriesIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/RepositoriesIT.java @@ -40,11 +40,10 @@ import java.util.List; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; -/** - */ @ESIntegTestCase.ClusterScope(minNumDataNodes = 2) public class RepositoriesIT extends AbstractSnapshotIntegTestCase { public void testRepositoryCreation() throws Exception { @@ -153,7 +152,9 @@ public class RepositoriesIT extends AbstractSnapshotIntegTestCase { .get(); fail("Shouldn't be here"); } catch (RepositoryException ex) { - assertThat(ex.toString(), containsString("unsupported url protocol [netdoc]")); + assertThat(ex.toString(), + either(containsString("unsupported url protocol [netdoc]")) + .or(containsString("unknown protocol: netdoc"))); // newer versions of JDK 9 } logger.info("--> trying creating url repository with location that is not registered in path.repo setting"); diff --git a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index 19b46710fea..313bf065b0a 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -39,6 +39,7 @@ import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; @@ -755,7 +756,7 @@ 
public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas .put("location", randomRepoPath()))); logger.info("--> creating index that cannot be allocated"); - prepareCreate("test-idx", 2, Settings.builder().put(IndexMetaData.INDEX_ROUTING_INCLUDE_GROUP_SETTING.getKey() + ".tag", "nowhere").put("index.number_of_shards", 3)).get(); + prepareCreate("test-idx", 2, Settings.builder().put(IndexMetaData.INDEX_ROUTING_INCLUDE_GROUP_SETTING.getKey() + ".tag", "nowhere").put("index.number_of_shards", 3)).setWaitForActiveShards(ActiveShardCount.NONE).get(); logger.info("--> snapshot"); CreateSnapshotResponse createSnapshotResponse = client.admin().cluster().prepareCreateSnapshot("test-repo", "test-snap").setWaitForCompletion(true).setIndices("test-idx").get(); @@ -818,8 +819,9 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas logger.info("--> delete the last snapshot"); client.admin().cluster().prepareDeleteSnapshot("test-repo", lastSnapshot).get(); - logger.info("--> make sure that number of files is back to what it was when the first snapshot was made"); - assertThat(numberOfFiles(repo), equalTo(numberOfFiles[0])); + logger.info("--> make sure that number of files is back to what it was when the first snapshot was made, " + + "plus one because one backup index-N file should remain"); + assertThat(numberOfFiles(repo), equalTo(numberOfFiles[0] + 1)); } public void testDeleteSnapshotWithMissingIndexAndShardMetadata() throws Exception { @@ -834,7 +836,6 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas .put("chunk_size", randomIntBetween(100, 1000), ByteSizeUnit.BYTES))); createIndex("test-idx-1", "test-idx-2"); - ensureYellow(); logger.info("--> indexing some data"); indexRandom(true, client().prepareIndex("test-idx-1", "doc").setSource("foo", "bar"), @@ -872,7 +873,6 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas .put("chunk_size", randomIntBetween(100, 1000), ByteSizeUnit.BYTES))); createIndex("test-idx-1", "test-idx-2"); - ensureYellow(); logger.info("--> indexing some data"); indexRandom(true, client().prepareIndex("test-idx-1", "doc").setSource("foo", "bar"), @@ -906,7 +906,6 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas .put("chunk_size", randomIntBetween(100, 1000), ByteSizeUnit.BYTES))); createIndex("test-idx-1", "test-idx-2"); - ensureYellow(); logger.info("--> indexing some data"); indexRandom(true, client().prepareIndex("test-idx-1", "doc").setSource("foo", "bar"), @@ -1396,8 +1395,8 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas long snapshotPause = 0L; long restorePause = 0L; for (RepositoriesService repositoriesService : internalCluster().getDataNodeInstances(RepositoriesService.class)) { - snapshotPause += repositoriesService.repository("test-repo").snapshotThrottleTimeInNanos(); - restorePause += repositoriesService.repository("test-repo").restoreThrottleTimeInNanos(); + snapshotPause += repositoriesService.repository("test-repo").getSnapshotThrottleTimeInNanos(); + restorePause += repositoriesService.repository("test-repo").getRestoreThrottleTimeInNanos(); } if (throttleSnapshot) { @@ -2041,7 +2040,7 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { fail(); } @@ -2174,7 +2173,6 @@ public class SharedClusterSnapshotRestoreIT extends 
AbstractSnapshotIntegTestCas .put("chunk_size", randomIntBetween(100, 1000), ByteSizeUnit.BYTES))); createIndex("test-idx-1", "test-idx-2", "test-idx-3"); - ensureYellow(); logger.info("--> indexing some data"); indexRandom(true, client().prepareIndex("test-idx-1", "doc").setSource("foo", "bar"), diff --git a/core/src/test/java/org/elasticsearch/snapshots/SnapshotBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/snapshots/SnapshotBackwardsCompatibilityIT.java index 31c0a193f07..a09ee908193 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/SnapshotBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/SnapshotBackwardsCompatibilityIT.java @@ -68,7 +68,6 @@ public class SnapshotBackwardsCompatibilityIT extends ESBackcompatTestCase { String[] indices = new String[indicesBefore.length + indicesAfter.length]; System.arraycopy(indicesBefore, 0, indices, 0, indicesBefore.length); System.arraycopy(indicesAfter, 0, indices, indicesBefore.length, indicesAfter.length); - ensureYellow(); logger.info("--> indexing some data"); IndexRequestBuilder[] buildersBefore = new IndexRequestBuilder[randomIntBetween(10, 200)]; for (int i = 0; i < buildersBefore.length; i++) { @@ -171,7 +170,6 @@ public class SnapshotBackwardsCompatibilityIT extends ESBackcompatTestCase { .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) )); - ensureYellow(); logger.info("--> indexing"); final int numDocs = randomIntBetween(10, 100); diff --git a/core/src/test/java/org/elasticsearch/snapshots/SnapshotRequestsTests.java b/core/src/test/java/org/elasticsearch/snapshots/SnapshotRequestsTests.java index 38d858c49aa..c178b2a6f83 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/SnapshotRequestsTests.java +++ b/core/src/test/java/org/elasticsearch/snapshots/SnapshotRequestsTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.snapshots; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.test.ESTestCase; @@ -75,7 +76,7 @@ public class SnapshotRequestsTests extends ESTestCase { builder.endArray(); } - byte[] bytes = builder.endObject().bytes().toBytes(); + byte[] bytes = BytesReference.toBytes(builder.endObject().bytes()); request.source(bytes); @@ -134,7 +135,7 @@ public class SnapshotRequestsTests extends ESTestCase { builder.endArray(); } - byte[] bytes = builder.endObject().bytes().toBytes(); + byte[] bytes = BytesReference.toBytes(builder.endObject().bytes()); request.source(bytes); diff --git a/core/src/test/java/org/elasticsearch/snapshots/SnapshotTests.java b/core/src/test/java/org/elasticsearch/snapshots/SnapshotTests.java index cb297785e4b..41cfa3d4141 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/SnapshotTests.java +++ b/core/src/test/java/org/elasticsearch/snapshots/SnapshotTests.java @@ -20,12 +20,10 @@ package org.elasticsearch.snapshots; import org.elasticsearch.common.UUIDs; -import org.elasticsearch.common.io.stream.ByteBufferStreamInput; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.test.ESTestCase; import java.io.IOException; -import java.nio.ByteBuffer; import static org.hamcrest.CoreMatchers.equalTo; @@ -50,8 +48,7 @@ public class 
SnapshotTests extends ESTestCase { final Snapshot original = new Snapshot(randomAsciiOfLength(randomIntBetween(2, 8)), snapshotId); final BytesStreamOutput out = new BytesStreamOutput(); original.writeTo(out); - final ByteBufferStreamInput in = new ByteBufferStreamInput(ByteBuffer.wrap(out.bytes().toBytes())); - assertThat(new Snapshot(in), equalTo(original)); + assertThat(new Snapshot(out.bytes().streamInput()), equalTo(original)); } } diff --git a/core/src/test/java/org/elasticsearch/snapshots/mockstore/BlobContainerWrapper.java b/core/src/test/java/org/elasticsearch/snapshots/mockstore/BlobContainerWrapper.java index dd1bc637a93..9a66100ae17 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/mockstore/BlobContainerWrapper.java +++ b/core/src/test/java/org/elasticsearch/snapshots/mockstore/BlobContainerWrapper.java @@ -68,16 +68,6 @@ public class BlobContainerWrapper implements BlobContainer { delegate.deleteBlob(blobName); } - @Override - public void deleteBlobs(Collection<String> blobNames) throws IOException { - delegate.deleteBlobs(blobNames); - } - - @Override - public void deleteBlobsByPrefix(String blobNamePrefix) throws IOException { - delegate.deleteBlobsByPrefix(blobNamePrefix); - } - @Override public Map<String, BlobMetaData> listBlobs() throws IOException { return delegate.listBlobs(); diff --git a/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java b/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java index bb6f5cc4f74..d17f0ea82c9 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java +++ b/core/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java @@ -19,29 +19,6 @@ package org.elasticsearch.snapshots.mockstore; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.snapshots.SnapshotId; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.blobstore.BlobContainer; -import org.elasticsearch.common.blobstore.BlobMetaData; -import org.elasticsearch.common.blobstore.BlobPath; -import org.elasticsearch.common.blobstore.BlobStore; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.PathUtils; -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.Property; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.SettingsModule; -import org.elasticsearch.env.Environment; -import org.elasticsearch.index.snapshots.IndexShardRepository; -import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository; -import org.elasticsearch.repositories.RepositoriesModule; -import org.elasticsearch.repositories.RepositoryName; -import org.elasticsearch.repositories.RepositorySettings; -import org.elasticsearch.repositories.fs.FsRepository; - import java.io.IOException; import java.io.InputStream; import java.io.UnsupportedEncodingException; @@ -49,22 +26,43 @@ import java.nio.file.Path; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.atomic.AtomicLong; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.cluster.metadata.MetaData; +import
org.elasticsearch.cluster.metadata.RepositoryMetaData; +import org.elasticsearch.common.blobstore.BlobContainer; +import org.elasticsearch.common.blobstore.BlobMetaData; +import org.elasticsearch.common.blobstore.BlobPath; +import org.elasticsearch.common.blobstore.BlobStore; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.Property; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.plugins.RepositoryPlugin; +import org.elasticsearch.repositories.Repository; +import org.elasticsearch.repositories.fs.FsRepository; +import org.elasticsearch.snapshots.SnapshotId; + public class MockRepository extends FsRepository { - public static class Plugin extends org.elasticsearch.plugins.Plugin { + public static class Plugin extends org.elasticsearch.plugins.Plugin implements RepositoryPlugin { public static final Setting<String> USERNAME_SETTING = Setting.simpleString("secret.mock.username", Property.NodeScope); public static final Setting<String> PASSWORD_SETTING = Setting.simpleString("secret.mock.password", Property.NodeScope, Property.Filtered); - public void onModule(RepositoriesModule repositoriesModule) { - repositoriesModule.registerRepository("mock", MockRepository.class, BlobStoreIndexShardRepository.class); + + @Override + public Map<String, Repository.Factory> getRepositories(Environment env) { + return Collections.singletonMap("mock", (metadata) -> new MockRepository(metadata, env)); } @Override @@ -99,45 +97,41 @@ public class MockRepository extends FsRepository { private volatile boolean blocked = false; - @Inject - public MockRepository(RepositoryName name, RepositorySettings repositorySettings, IndexShardRepository indexShardRepository, ClusterService clusterService, Environment environment) throws IOException { - super(name, overrideSettings(repositorySettings, clusterService), indexShardRepository, environment); - randomControlIOExceptionRate = repositorySettings.settings().getAsDouble("random_control_io_exception_rate", 0.0); - randomDataFileIOExceptionRate = repositorySettings.settings().getAsDouble("random_data_file_io_exception_rate", 0.0); - maximumNumberOfFailures = repositorySettings.settings().getAsLong("max_failure_number", 100L); - blockOnControlFiles = repositorySettings.settings().getAsBoolean("block_on_control", false); - blockOnDataFiles = repositorySettings.settings().getAsBoolean("block_on_data", false); - blockOnInitialization = repositorySettings.settings().getAsBoolean("block_on_init", false); - randomPrefix = repositorySettings.settings().get("random", "default"); - waitAfterUnblock = repositorySettings.settings().getAsLong("wait_after_unblock", 0L); + public MockRepository(RepositoryMetaData metadata, Environment environment) throws IOException { + super(overrideSettings(metadata, environment), environment); + randomControlIOExceptionRate = metadata.settings().getAsDouble("random_control_io_exception_rate", 0.0); + randomDataFileIOExceptionRate = metadata.settings().getAsDouble("random_data_file_io_exception_rate", 0.0); + maximumNumberOfFailures = metadata.settings().getAsLong("max_failure_number", 100L); + blockOnControlFiles = metadata.settings().getAsBoolean("block_on_control", false); + blockOnDataFiles = metadata.settings().getAsBoolean("block_on_data", false); + blockOnInitialization = metadata.settings().getAsBoolean("block_on_init", false); + randomPrefix =
metadata.settings().get("random", "default"); + waitAfterUnblock = metadata.settings().getAsLong("wait_after_unblock", 0L); logger.info("starting mock repository with random prefix {}", randomPrefix); mockBlobStore = new MockBlobStore(super.blobStore()); } @Override - public void initializeSnapshot(SnapshotId snapshotId, List indices, MetaData metaData) { + public void initializeSnapshot(SnapshotId snapshotId, List indices, MetaData clusterMetadata) { if (blockOnInitialization ) { blockExecution(); } - super.initializeSnapshot(snapshotId, indices, metaData); + super.initializeSnapshot(snapshotId, indices, clusterMetadata); } - private static RepositorySettings overrideSettings(RepositorySettings repositorySettings, ClusterService clusterService) { - if (repositorySettings.settings().getAsBoolean("localize_location", false)) { - return new RepositorySettings( - repositorySettings.globalSettings(), - localizeLocation(repositorySettings.settings(), clusterService)); + private static RepositoryMetaData overrideSettings(RepositoryMetaData metadata, Environment environment) { + // TODO: use another method of testing not being able to read the test file written by the master... + // this is super duper hacky + if (metadata.settings().getAsBoolean("localize_location", false)) { + Path location = PathUtils.get(metadata.settings().get("location")); + location = location.resolve(Integer.toString(environment.hashCode())); + return new RepositoryMetaData(metadata.name(), metadata.type(), + Settings.builder().put(metadata.settings()).put("location", location.toAbsolutePath()).build()); } else { - return repositorySettings; + return metadata; } } - private static Settings localizeLocation(Settings settings, ClusterService clusterService) { - Path location = PathUtils.get(settings.get("location")); - location = location.resolve(clusterService.localNode().getId()); - return Settings.builder().put(settings).put("location", location.toAbsolutePath()).build(); - } - private long incrementAndGetFailureCount() { return failureCounter.incrementAndGet(); } @@ -305,12 +299,6 @@ public class MockRepository extends FsRepository { super.deleteBlob(blobName); } - @Override - public void deleteBlobsByPrefix(String blobNamePrefix) throws IOException { - maybeIOExceptionOrBlock(blobNamePrefix); - super.deleteBlobsByPrefix(blobNamePrefix); - } - @Override public Map listBlobs() throws IOException { maybeIOExceptionOrBlock(""); diff --git a/core/src/test/java/org/elasticsearch/tasks/PersistedTaskInfoTests.java b/core/src/test/java/org/elasticsearch/tasks/PersistedTaskInfoTests.java index bfbb2dff4c7..5b507436129 100644 --- a/core/src/test/java/org/elasticsearch/tasks/PersistedTaskInfoTests.java +++ b/core/src/test/java/org/elasticsearch/tasks/PersistedTaskInfoTests.java @@ -37,7 +37,7 @@ import java.util.Map; import java.util.TreeMap; /** - * Round trip tests for {@link PersistedTaskInfo} and those classes that it includes like {@link TaskInfo} and {@link RawTaskStatus}. + * Round trip tests for {@link PersistedTaskInfo} and those classes that it includes like {@link TaskInfo} and {@link RawTaskStatus}. 
*/ public class PersistedTaskInfoTests extends ESTestCase { public void testBinaryRoundTrip() throws IOException { @@ -47,7 +47,7 @@ public class PersistedTaskInfoTests extends ESTestCase { PersistedTaskInfo read; try (BytesStreamOutput out = new BytesStreamOutput()) { result.writeTo(out); - try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(out.bytes()), registry)) { + try (StreamInput in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), registry)) { read = new PersistedTaskInfo(in); } } catch (IOException e) { diff --git a/core/src/test/java/org/elasticsearch/tasks/TaskIdTests.java b/core/src/test/java/org/elasticsearch/tasks/TaskIdTests.java index b13de26b976..f7990cfacb7 100644 --- a/core/src/test/java/org/elasticsearch/tasks/TaskIdTests.java +++ b/core/src/test/java/org/elasticsearch/tasks/TaskIdTests.java @@ -57,7 +57,7 @@ public class TaskIdTests extends ESTestCase { taskId.writeTo(out); BytesReference bytes = out.bytes(); assertEquals(expectedSize, bytes.length()); - try (StreamInput in = StreamInput.wrap(bytes)) { + try (StreamInput in = bytes.streamInput()) { return TaskId.readFromStream(in); } } diff --git a/core/src/test/java/org/elasticsearch/test/MockLogAppender.java b/core/src/test/java/org/elasticsearch/test/MockLogAppender.java index 9e4a881b25b..8f10ccd6537 100644 --- a/core/src/test/java/org/elasticsearch/test/MockLogAppender.java +++ b/core/src/test/java/org/elasticsearch/test/MockLogAppender.java @@ -34,7 +34,7 @@ import static org.hamcrest.MatcherAssert.assertThat; */ public class MockLogAppender extends AppenderSkeleton { - private final static String COMMON_PREFIX = System.getProperty("es.logger.prefix", "org.elasticsearch."); + private static final String COMMON_PREFIX = System.getProperty("es.logger.prefix", "org.elasticsearch."); private List expectations; @@ -75,7 +75,7 @@ public class MockLogAppender extends AppenderSkeleton { void assertMatched(); } - public static abstract class AbstractEventExpectation implements LoggingExpectation { + public abstract static class AbstractEventExpectation implements LoggingExpectation { protected final String name; protected final String logger; protected final Level level; diff --git a/core/src/test/java/org/elasticsearch/test/NoopDiscovery.java b/core/src/test/java/org/elasticsearch/test/NoopDiscovery.java index 3193aaf458e..c36082f1475 100644 --- a/core/src/test/java/org/elasticsearch/test/NoopDiscovery.java +++ b/core/src/test/java/org/elasticsearch/test/NoopDiscovery.java @@ -20,7 +20,7 @@ package org.elasticsearch.test; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.routing.RoutingService; +import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.component.LifecycleListener; import org.elasticsearch.discovery.Discovery; @@ -41,7 +41,7 @@ public class NoopDiscovery implements Discovery { } @Override - public void setRoutingService(RoutingService routingService) { + public void setAllocationService(AllocationService allocationService) { } @@ -86,17 +86,11 @@ public class NoopDiscovery implements Discovery { } @Override - public Discovery start() { - return null; - } + public void start() {} @Override - public Discovery stop() { - return null; - } + public void stop() {} @Override - public void close() { - - } + public void close() {} } diff --git 
a/core/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java b/core/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java index 897fa44b593..20c82e6f518 100644 --- a/core/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java +++ b/core/src/test/java/org/elasticsearch/test/geo/RandomShapeGenerator.java @@ -20,16 +20,9 @@ package org.elasticsearch.test.geo; import com.carrotsearch.randomizedtesting.generators.RandomInts; -import org.locationtech.spatial4j.context.jts.JtsSpatialContext; -import org.locationtech.spatial4j.distance.DistanceUtils; -import org.locationtech.spatial4j.exception.InvalidShapeException; -import org.locationtech.spatial4j.shape.Point; -import org.locationtech.spatial4j.shape.Rectangle; -import org.locationtech.spatial4j.shape.impl.Range; import com.vividsolutions.jts.algorithm.ConvexHull; import com.vividsolutions.jts.geom.Coordinate; import com.vividsolutions.jts.geom.Geometry; - import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.geo.builders.CoordinateCollection; import org.elasticsearch.common.geo.builders.CoordinatesBuilder; @@ -42,6 +35,12 @@ import org.elasticsearch.common.geo.builders.PolygonBuilder; import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.search.geo.GeoShapeQueryTests; import org.junit.Assert; +import org.locationtech.spatial4j.context.jts.JtsSpatialContext; +import org.locationtech.spatial4j.distance.DistanceUtils; +import org.locationtech.spatial4j.exception.InvalidShapeException; +import org.locationtech.spatial4j.shape.Point; +import org.locationtech.spatial4j.shape.Rectangle; +import org.locationtech.spatial4j.shape.impl.Range; import java.util.Random; @@ -230,14 +229,10 @@ public class RandomShapeGenerator extends RandomGeoGenerator { // The validate flag will check for these possibilities and bail if an incorrect geometry is created try { pgb.build(); - } catch (Throwable e) { + } catch (AssertionError | InvalidShapeException e) { // jts bug may occasionally misinterpret coordinate order causing an unhelpful ('geom' assertion) // or InvalidShapeException - if (e instanceof InvalidShapeException || e instanceof AssertionError) { - return null; - } - // throw any other exception - throw e; + return null; } } return pgb; diff --git a/core/src/test/java/org/elasticsearch/threadpool/ScheduleWithFixedDelayTests.java b/core/src/test/java/org/elasticsearch/threadpool/ScheduleWithFixedDelayTests.java new file mode 100644 index 00000000000..dd1f4991f95 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/threadpool/ScheduleWithFixedDelayTests.java @@ -0,0 +1,295 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.threadpool; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.BaseFuture; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.node.Node; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool.Cancellable; +import org.elasticsearch.threadpool.ThreadPool.Names; +import org.elasticsearch.threadpool.ThreadPool.ReschedulingRunnable; +import org.junit.After; +import org.junit.Before; + +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.isOneOf; +import static org.hamcrest.Matchers.sameInstance; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; + +/** + * Unit tests for the scheduling of tasks with a fixed delay + */ +public class ScheduleWithFixedDelayTests extends ESTestCase { + + private ThreadPool threadPool; + + @Before + public void setup() { + threadPool = new ThreadPool(Settings.builder().put(Node.NODE_NAME_SETTING.getKey(), "fixed delay tests").build()); + } + + @After + public void shutdown() throws Exception { + terminate(threadPool); + } + + public void testDoesNotRescheduleUntilExecutionFinished() throws Exception { + final TimeValue delay = TimeValue.timeValueMillis(100L); + final CountDownLatch startLatch = new CountDownLatch(1); + final CountDownLatch pauseLatch = new CountDownLatch(1); + ThreadPool threadPool = mock(ThreadPool.class); + final Runnable runnable = () -> { + // notify that the runnable is started + startLatch.countDown(); + try { + // wait for other thread to un-pause + pauseLatch.await(); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + } + }; + ReschedulingRunnable reschedulingRunnable = new ReschedulingRunnable(runnable, delay, Names.GENERIC, threadPool); + // this call was made during construction of the runnable + verify(threadPool, times(1)).schedule(delay, Names.GENERIC, reschedulingRunnable); + + // create a thread and start the runnable + Thread runThread = new Thread() { + @Override + public void run() { + reschedulingRunnable.run(); + } + }; + runThread.start(); + + // wait for the runnable to be started and ensure the runnable hasn't used the threadpool again + startLatch.await(); + verifyNoMoreInteractions(threadPool); + + // un-pause the runnable and allow it to complete execution + pauseLatch.countDown(); + runThread.join(); + + // validate schedule was called again + verify(threadPool, times(2)).schedule(delay, Names.GENERIC, reschedulingRunnable); + } + + public void testThatRunnableIsRescheduled() throws Exception { + final CountDownLatch latch = new CountDownLatch(scaledRandomIntBetween(2, 16)); + final Runnable countingRunnable = () -> { + if (rarely()) { + throw new ElasticsearchException("sometimes we throw before counting down"); + } + + latch.countDown(); + + if (randomBoolean()) { + throw new ElasticsearchException("this shouldn't cause the 
test to fail!"); + } + }; + + Cancellable cancellable = threadPool.scheduleWithFixedDelay(countingRunnable, TimeValue.timeValueMillis(10L), Names.GENERIC); + assertNotNull(cancellable); + + // wait for the number of successful count down operations + latch.await(); + + // cancel + cancellable.cancel(); + assertTrue(cancellable.isCancelled()); + } + + public void testCancellingRunnable() throws Exception { + final boolean shouldThrow = randomBoolean(); + final AtomicInteger counter = new AtomicInteger(scaledRandomIntBetween(2, 16)); + final CountDownLatch doneLatch = new CountDownLatch(1); + final AtomicReference cancellableRef = new AtomicReference<>(); + final AtomicBoolean runAfterDone = new AtomicBoolean(false); + final Runnable countingRunnable = () -> { + + if (doneLatch.getCount() == 0) { + runAfterDone.set(true); + logger.warn("this runnable ran after it was cancelled"); + } + + final Cancellable cancellable = cancellableRef.get(); + if (cancellable == null) { + // wait for the cancellable to be present before we really start so we can accurately know we cancelled + return; + } + + // rarely throw an exception before counting down + if (shouldThrow && rarely()) { + throw new RuntimeException("throw before count down"); + } + + final int count = counter.decrementAndGet(); + + // see if we have counted down to zero or below yet. the exception throwing could make us count below zero + if (count <= 0) { + cancellable.cancel(); + doneLatch.countDown(); + } + + // rarely throw an exception after execution + if (shouldThrow && rarely()) { + throw new RuntimeException("throw at end"); + } + }; + Cancellable cancellable = threadPool.scheduleWithFixedDelay(countingRunnable, TimeValue.timeValueMillis(10L), Names.GENERIC); + cancellableRef.set(cancellable); + // wait for the runnable to finish + doneLatch.await(); + + // the runnable should have cancelled itself + assertTrue(cancellable.isCancelled()); + assertFalse(runAfterDone.get()); + + // rarely wait and make sure the runnable didn't run at the next interval + if (rarely()) { + assertFalse(awaitBusy(runAfterDone::get, 1L, TimeUnit.SECONDS)); + } + } + + public void testBlockingCallOnSchedulerThreadFails() throws Exception { + final BaseFuture future = new BaseFuture() {}; + final TestFuture resultsFuture = new TestFuture(); + final boolean getWithTimeout = randomBoolean(); + + final Runnable runnable = () -> { + try { + Object obj; + if (getWithTimeout) { + obj = future.get(1L, TimeUnit.SECONDS); + } else { + obj = future.get(); + } + resultsFuture.futureDone(obj); + } catch (Throwable t) { + resultsFuture.futureDone(t); + } + }; + + Cancellable cancellable = threadPool.scheduleWithFixedDelay(runnable, TimeValue.timeValueMillis(10L), Names.SAME); + Object resultingObject = resultsFuture.get(); + assertNotNull(resultingObject); + assertThat(resultingObject, instanceOf(Throwable.class)); + Throwable t = (Throwable) resultingObject; + assertThat(t, instanceOf(AssertionError.class)); + assertThat(t.getMessage(), containsString("Blocking")); + assertFalse(cancellable.isCancelled()); + } + + public void testBlockingCallOnNonSchedulerThreadAllowed() throws Exception { + final TestFuture future = new TestFuture(); + final TestFuture resultsFuture = new TestFuture(); + final boolean rethrow = randomBoolean(); + final boolean getWithTimeout = randomBoolean(); + + final Runnable runnable = () -> { + try { + Object obj; + if (getWithTimeout) { + obj = future.get(1, TimeUnit.MINUTES); + } else { + obj = future.get(); + } + 
resultsFuture.futureDone(obj); + } catch (Throwable t) { + resultsFuture.futureDone(t); + if (rethrow) { + throw new RuntimeException(t); + } + } + }; + + final Cancellable cancellable = threadPool.scheduleWithFixedDelay(runnable, TimeValue.timeValueMillis(10L), Names.GENERIC); + assertFalse(resultsFuture.isDone()); + + final Object o = new Object(); + future.futureDone(o); + + final Object resultingObject = resultsFuture.get(); + assertThat(resultingObject, sameInstance(o)); + assertFalse(cancellable.isCancelled()); + } + + public void testOnRejectionCausesCancellation() throws Exception { + final TimeValue delay = TimeValue.timeValueMillis(10L); + terminate(threadPool); + threadPool = new ThreadPool(Settings.builder().put(Node.NODE_NAME_SETTING.getKey(), "fixed delay tests").build()) { + @Override + public ScheduledFuture<?> schedule(TimeValue delay, String executor, Runnable command) { + if (command instanceof ReschedulingRunnable) { + ((ReschedulingRunnable) command).onRejection(new EsRejectedExecutionException()); + } else { + fail("this should only be called with a rescheduling runnable in this test"); + } + return null; + } + }; + Runnable runnable = () -> {}; + ReschedulingRunnable reschedulingRunnable = new ReschedulingRunnable(runnable, delay, Names.GENERIC, threadPool); + assertTrue(reschedulingRunnable.isCancelled()); + } + + public void testRunnableRunsAtMostOnceAfterCancellation() throws Exception { + final int iterations = scaledRandomIntBetween(1, 12); + final AtomicInteger counter = new AtomicInteger(); + final CountDownLatch doneLatch = new CountDownLatch(iterations); + final Runnable countingRunnable = () -> { + counter.incrementAndGet(); + doneLatch.countDown(); + }; + + final Cancellable cancellable = threadPool.scheduleWithFixedDelay(countingRunnable, TimeValue.timeValueMillis(10L), Names.GENERIC); + doneLatch.await(); + cancellable.cancel(); + final int counterValue = counter.get(); + assertThat(counterValue, isOneOf(iterations, iterations + 1)); + + if (rarely()) { + awaitBusy(() -> { + final int value = counter.get(); + return value == iterations || value == iterations + 1; + }, 50L, TimeUnit.MILLISECONDS); + } + } + + static final class TestFuture extends BaseFuture<Object> { + boolean futureDone(Object value) { + return set(value); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java b/core/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java index 28267e9beb7..974929dddf2 100644 --- a/core/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java +++ b/core/src/test/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java @@ -19,8 +19,6 @@ package org.elasticsearch.threadpool; -import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; -import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; @@ -36,7 +34,6 @@ import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.hamcrest.RegexMatcher; -import org.elasticsearch.threadpool.ThreadPool.Names; import org.elasticsearch.tribe.TribeIT; import java.io.IOException; @@ -46,19 +43,11 @@ import java.lang.management.ThreadMXBean; import java.util.HashSet; import java.util.Map; import java.util.Set; -import
java.util.concurrent.BrokenBarrierException; -import java.util.concurrent.CyclicBarrier; -import java.util.concurrent.Executor; -import java.util.concurrent.ThreadPoolExecutor; -import java.util.concurrent.TimeUnit; import java.util.regex.Pattern; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.not; -import static org.hamcrest.Matchers.sameInstance; /** */ @@ -136,9 +125,9 @@ public class SimpleThreadPoolIT extends ESIntegTestCase { try { new Node(settings); fail("The node startup is supposed to fail"); - } catch(Throwable t) { + } catch(Exception e) { //all good - assertThat(t.getMessage(), containsString("mandatory plugins [non_existing]")); + assertThat(e.getMessage(), containsString("mandatory plugins [non_existing]")); } } diff --git a/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java b/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java index 486b0635c64..14cf10b8f31 100644 --- a/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java +++ b/core/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java @@ -58,7 +58,7 @@ public class ThreadPoolSerializationTests extends ESTestCase { output.setVersion(Version.CURRENT); info.writeTo(output); - StreamInput input = StreamInput.wrap(output.bytes()); + StreamInput input = output.bytes().streamInput(); ThreadPool.Info newInfo = new ThreadPool.Info(); newInfo.readFrom(input); @@ -70,7 +70,7 @@ public class ThreadPoolSerializationTests extends ESTestCase { output.setVersion(Version.CURRENT); info.writeTo(output); - StreamInput input = StreamInput.wrap(output.bytes()); + StreamInput input = output.bytes().streamInput(); ThreadPool.Info newInfo = new ThreadPool.Info(); newInfo.readFrom(input); @@ -125,7 +125,7 @@ public class ThreadPoolSerializationTests extends ESTestCase { output.setVersion(Version.CURRENT); info.writeTo(output); - StreamInput input = StreamInput.wrap(output.bytes()); + StreamInput input = output.bytes().streamInput(); ThreadPool.Info newInfo = new ThreadPool.Info(); newInfo.readFrom(input); diff --git a/core/src/test/java/org/elasticsearch/transport/NettyTransportServiceHandshakeTests.java b/core/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java similarity index 94% rename from core/src/test/java/org/elasticsearch/transport/NettyTransportServiceHandshakeTests.java rename to core/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java index 7bbd6e0fea7..39ab7949a65 100644 --- a/core/src/test/java/org/elasticsearch/transport/NettyTransportServiceHandshakeTests.java +++ b/core/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.transport; import org.elasticsearch.Version; -import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkService; @@ -32,7 +31,6 @@ import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; -import 
org.elasticsearch.transport.netty.NettyTransport; import org.junit.After; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -45,27 +43,27 @@ import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; import static org.hamcrest.Matchers.containsString; -public class NettyTransportServiceHandshakeTests extends ESTestCase { +public class TransportServiceHandshakeTests extends ESTestCase { private static ThreadPool threadPool; private static final long timeout = Long.MAX_VALUE; @BeforeClass public static void startThreadPool() { - threadPool = new TestThreadPool(NettyTransportServiceHandshakeTests.class.getSimpleName()); + threadPool = new TestThreadPool(TransportServiceHandshakeTests.class.getSimpleName()); } private List<TransportService> transportServices = new ArrayList<>(); private NetworkHandle startServices(String nodeNameAndId, Settings settings, Version version) { - NettyTransport transport = - new NettyTransport( - settings, + MockTcpTransport transport = + new MockTcpTransport( + settings, threadPool, - new NetworkService(settings), BigArrays.NON_RECYCLING_INSTANCE, + new NoneCircuitBreakerService(), new NamedWriteableRegistry(), - new NoneCircuitBreakerService()); + new NetworkService(settings)); TransportService transportService = new MockTransportService(settings, transport, threadPool); transportService.start(); transportService.acceptIncomingRequests(); diff --git a/core/src/test/java/org/elasticsearch/tribe/TribeIT.java b/core/src/test/java/org/elasticsearch/tribe/TribeIT.java index 6ea466d3cb3..e1df7201fbe 100644 --- a/core/src/test/java/org/elasticsearch/tribe/TribeIT.java +++ b/core/src/test/java/org/elasticsearch/tribe/TribeIT.java @@ -32,24 +32,24 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Priority; import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.discovery.MasterNotDiscoveredException; import org.elasticsearch.discovery.zen.ping.unicast.UnicastZenPing; import org.elasticsearch.node.Node; -import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.NodeConfigurationSource; import org.elasticsearch.test.TestCluster; import org.junit.After; import org.junit.AfterClass; -import org.junit.BeforeClass; +import org.junit.Before; import java.io.IOException; import java.util.ArrayList; -import java.util.Collection; import java.util.Collections; import java.util.Map; import java.util.function.Function; @@ -74,31 +74,16 @@ public class TribeIT extends ESIntegTestCase { private Node tribeNode; private Client tribeClient; - @BeforeClass - public static void setupSecondCluster() throws Exception { - ESIntegTestCase.beforeClass(); - NodeConfigurationSource nodeConfigurationSource = new NodeConfigurationSource() { - @Override - public Settings nodeSettings(int nodeOrdinal) { - return Settings.builder().put(NetworkModule.HTTP_ENABLED.getKey(), false).build(); - } - - @Override - public Collection<Class<? extends Plugin>> nodePlugins() { - return Collections.emptyList(); - } - - @Override - public Settings transportClientSettings() { - return null; - } - - }; - cluster2 = new
InternalTestCluster(InternalTestCluster.configuredNodeMode(), randomLong(), createTempDir(), true, 2, 2, - UUIDs.randomBase64UUID(random()), nodeConfigurationSource, 0, false, SECOND_CLUSTER_NODE_PREFIX, Collections.emptyList(), Function.identity()); - - cluster2.beforeTest(random(), 0.1); - cluster2.ensureAtLeastNumDataNodes(2); + @Before + public void setupSecondCluster() throws Exception { + if (cluster2 == null) { + final NodeConfigurationSource configSource = getNodeConfigSource(); + cluster2 = new InternalTestCluster(randomLong(), createTempDir(), true, 2, 2, + UUIDs.randomBase64UUID(random()), configSource, 0, false, SECOND_CLUSTER_NODE_PREFIX, getMockPlugins(), + Function.identity()); + cluster2.beforeTest(random(), 0.1); + cluster2.ensureAtLeastNumDataNodes(2); + } } @AfterClass @@ -145,8 +130,16 @@ public class TribeIT extends ESIntegTestCase { Settings merged = Settings.builder() .put("tribe.t1.cluster.name", internalCluster().getClusterName()) .put("tribe.t2.cluster.name", cluster2.getClusterName()) + .put("tribe.t1.transport.type", "local") + .put("tribe.t2.transport.type", "local") + .put("tribe.t1.discovery.type", "local") + .put("tribe.t2.discovery.type", "local") + .put("transport.type", "local") + .put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "local") .put("tribe.blocks.write", false) + .put(NetworkModule.HTTP_ENABLED.getKey(), false) .put(settings) + .put(tribe1Defaults.build()) .put(tribe2Defaults.build()) .put(internalCluster().getDefaultSettings()) diff --git a/core/src/test/java/org/elasticsearch/tribe/TribeServiceTests.java b/core/src/test/java/org/elasticsearch/tribe/TribeServiceTests.java index 2bbedd8784b..43ee8fee151 100644 --- a/core/src/test/java/org/elasticsearch/tribe/TribeServiceTests.java +++ b/core/src/test/java/org/elasticsearch/tribe/TribeServiceTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.tribe; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; @@ -27,15 +28,17 @@ public class TribeServiceTests extends ESTestCase { Settings globalSettings = Settings.builder() .put("node.name", "nodename") .put("path.home", "some/path").build(); - Settings clientSettings = TribeService.buildClientSettings("tribe1", globalSettings, Settings.EMPTY); + Settings clientSettings = TribeService.buildClientSettings("tribe1", "parent_id", globalSettings, Settings.EMPTY); assertEquals("some/path", clientSettings.get("path.home")); assertEquals("nodename/tribe1", clientSettings.get("node.name")); assertEquals("tribe1", clientSettings.get("tribe.name")); - assertEquals("false", clientSettings.get("http.enabled")); + assertFalse(NetworkModule.HTTP_ENABLED.get(clientSettings)); assertEquals("false", clientSettings.get("node.master")); assertEquals("false", clientSettings.get("node.data")); assertEquals("false", clientSettings.get("node.ingest")); - assertEquals(7, clientSettings.getAsMap().size()); + assertEquals("false", clientSettings.get("node.local_storage")); + assertEquals("3707202549613653169", clientSettings.get("node.id.seed")); // should be fixed by the parent id and tribe name + assertEquals(9, clientSettings.getAsMap().size()); } public void testEnvironmentSettings() { @@ -45,7 +48,7 @@ public class TribeServiceTests extends ESTestCase { .put("path.conf", "conf/path") .put("path.scripts", "scripts/path") .put("path.logs", "logs/path").build(); - Settings clientSettings = TribeService.buildClientSettings("tribe1", globalSettings, Settings.EMPTY); + Settings 
clientSettings = TribeService.buildClientSettings("tribe1", "parent_id", globalSettings, Settings.EMPTY); assertEquals("some/path", clientSettings.get("path.home")); assertEquals("conf/path", clientSettings.get("path.conf")); assertEquals("scripts/path", clientSettings.get("path.scripts")); @@ -54,7 +57,7 @@ public class TribeServiceTests extends ESTestCase { Settings tribeSettings = Settings.builder() .put("path.home", "alternate/path").build(); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { - TribeService.buildClientSettings("tribe1", globalSettings, tribeSettings); + TribeService.buildClientSettings("tribe1", "parent_id", globalSettings, tribeSettings); }); assertTrue(e.getMessage(), e.getMessage().contains("Setting [path.home] not allowed in tribe client")); } @@ -69,7 +72,7 @@ public class TribeServiceTests extends ESTestCase { .put("transport.host", "3.3.3.3") .put("transport.bind_host", "4.4.4.4") .put("transport.publish_host", "5.5.5.5").build(); - Settings clientSettings = TribeService.buildClientSettings("tribe1", globalSettings, Settings.EMPTY); + Settings clientSettings = TribeService.buildClientSettings("tribe1", "parent_id", globalSettings, Settings.EMPTY); assertEquals("0.0.0.0", clientSettings.get("network.host")); assertEquals("1.1.1.1", clientSettings.get("network.bind_host")); assertEquals("2.2.2.2", clientSettings.get("network.publish_host")); @@ -85,7 +88,7 @@ public class TribeServiceTests extends ESTestCase { .put("transport.host", "6.6.6.6") .put("transport.bind_host", "7.7.7.7") .put("transport.publish_host", "8.8.8.8").build(); - clientSettings = TribeService.buildClientSettings("tribe1", globalSettings, tribeSettings); + clientSettings = TribeService.buildClientSettings("tribe1", "parent_id", globalSettings, tribeSettings); assertEquals("3.3.3.3", clientSettings.get("network.host")); assertEquals("4.4.4.4", clientSettings.get("network.bind_host")); assertEquals("5.5.5.5", clientSettings.get("network.publish_host")); diff --git a/core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java b/core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java index 9e08ecde6fa..ea9fa078b55 100644 --- a/core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java +++ b/core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java @@ -59,7 +59,7 @@ import static org.hamcrest.Matchers.nullValue; @ClusterScope(scope= Scope.SUITE, supportsDedicatedMasters = false, numDataNodes = 1) public class SimpleTTLIT extends ESIntegTestCase { - static private final long PURGE_INTERVAL = 200; + private static final long PURGE_INTERVAL = 200; @Override protected int numberOfShards() { @@ -96,7 +96,6 @@ public class SimpleTTLIT extends ESIntegTestCase { .startObject("_ttl").field("enabled", true).field("default", "1d").endObject() .endObject() .endObject())); - ensureYellow("test"); final NumShards test = getNumShards("test"); @@ -252,7 +251,6 @@ public class SimpleTTLIT extends ESIntegTestCase { .startObject("_ttl").field("enabled", true).endObject() .endObject() .endObject())); - ensureYellow("test"); long aLongTime = 10000000; long firstTtl = aLongTime * 3; diff --git a/core/src/test/java/org/elasticsearch/update/TimestampTTLBWIT.java b/core/src/test/java/org/elasticsearch/update/TimestampTTLBWIT.java index 3573089fcaa..e81b4decb2d 100644 --- a/core/src/test/java/org/elasticsearch/update/TimestampTTLBWIT.java +++ b/core/src/test/java/org/elasticsearch/update/TimestampTTLBWIT.java @@ -87,7 +87,7 @@ public class TimestampTTLBWIT extends ESIntegTestCase { 
.setQuery(matchAllQuery()) .setSize(randomIntBetween(1, numDocs + 5)) .addSort("_timestamp", order) - .addField("_timestamp") + .addStoredField("_timestamp") .execute().actionGet(); assertNoFailures(searchResponse); SearchHit[] hits = searchResponse.getHits().hits(); diff --git a/core/src/test/java/org/elasticsearch/update/UpdateByNativeScriptIT.java b/core/src/test/java/org/elasticsearch/update/UpdateByNativeScriptIT.java index 9c335b95714..76b47f65f6b 100644 --- a/core/src/test/java/org/elasticsearch/update/UpdateByNativeScriptIT.java +++ b/core/src/test/java/org/elasticsearch/update/UpdateByNativeScriptIT.java @@ -54,7 +54,6 @@ public class UpdateByNativeScriptIT extends ESIntegTestCase { public void testThatUpdateUsingNativeScriptWorks() throws Exception { createIndex("test"); - ensureYellow(); index("test", "type", "1", "text", "value"); diff --git a/core/src/test/java/org/elasticsearch/update/UpdateIT.java b/core/src/test/java/org/elasticsearch/update/UpdateIT.java index 55834c181b0..c8cacbc36c3 100644 --- a/core/src/test/java/org/elasticsearch/update/UpdateIT.java +++ b/core/src/test/java/org/elasticsearch/update/UpdateIT.java @@ -65,11 +65,8 @@ import java.util.concurrent.TimeUnit; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows; -import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -800,7 +797,7 @@ public class UpdateIT extends ESIntegTestCase { final CountDownLatch latch = new CountDownLatch(numberOfThreads); final CountDownLatch startLatch = new CountDownLatch(1); final int numberOfUpdatesPerThread = scaledRandomIntBetween(100, 500); - final List<Throwable> failures = new CopyOnWriteArrayList<>(); + final List<Exception> failures = new CopyOnWriteArrayList<>(); for (int i = 0; i < numberOfThreads; i++) { Runnable r = new Runnable() { @@ -832,7 +829,7 @@ public class UpdateIT extends ESIntegTestCase { logger.warn("Test was forcefully stopped.
Client [{}] may still have outstanding requests.", Thread.currentThread().getName()); failures.add(e); Thread.currentThread().interrupt(); - } catch (Throwable e) { + } catch (Exception e) { failures.add(e); } finally { latch.countDown(); @@ -900,7 +897,7 @@ public class UpdateIT extends ESIntegTestCase { } @Override - public void onFailure(Throwable e) { + public void onFailure(Exception e) { synchronized (failedMap) { incrementMapValue(id, failedMap); } @@ -922,7 +919,7 @@ public class UpdateIT extends ESIntegTestCase { } @Override - public void onFailure(Throwable e) { + public void onFailure(Exception e) { synchronized (failedMap) { incrementMapValue(id, failedMap); } @@ -976,7 +973,7 @@ public class UpdateIT extends ESIntegTestCase { } } } - } catch (Throwable e) { + } catch (Exception e) { logger.error("Something went wrong", e); failures.add(e); } finally { diff --git a/core/src/test/java/org/elasticsearch/update/UpdateNoopIT.java b/core/src/test/java/org/elasticsearch/update/UpdateNoopIT.java index b4b5eefc832..f14d91465f6 100644 --- a/core/src/test/java/org/elasticsearch/update/UpdateNoopIT.java +++ b/core/src/test/java/org/elasticsearch/update/UpdateNoopIT.java @@ -240,12 +240,12 @@ public class UpdateNoopIT extends ESIntegTestCase { private void updateAndCheckSource(long expectedVersion, Boolean detectNoop, XContentBuilder xContentBuilder) { UpdateResponse updateResponse = update(detectNoop, expectedVersion, xContentBuilder); - assertEquals(updateResponse.getGetResult().sourceRef().toUtf8(), xContentBuilder.bytes().toUtf8()); + assertEquals(updateResponse.getGetResult().sourceRef().utf8ToString(), xContentBuilder.bytes().utf8ToString()); } private UpdateResponse update(Boolean detectNoop, long expectedVersion, XContentBuilder xContentBuilder) { UpdateRequestBuilder updateRequest = client().prepareUpdate("test", "type1", "1") - .setDoc(xContentBuilder.bytes().toUtf8()) + .setDoc(xContentBuilder.bytes().utf8ToString()) .setDocAsUpsert(true) .setFields("_source"); if (detectNoop != null) { diff --git a/core/src/test/java/org/elasticsearch/versioning/ConcurrentDocumentOperationIT.java b/core/src/test/java/org/elasticsearch/versioning/ConcurrentDocumentOperationIT.java index b2cc794ac6c..e2c572f783a 100644 --- a/core/src/test/java/org/elasticsearch/versioning/ConcurrentDocumentOperationIT.java +++ b/core/src/test/java/org/elasticsearch/versioning/ConcurrentDocumentOperationIT.java @@ -53,7 +53,7 @@ public class ConcurrentDocumentOperationIT extends ESIntegTestCase { } @Override - public void onFailure(Throwable e) { + public void onFailure(Exception e) { logger.error("Unexpected exception while indexing", e); failure.set(e); latch.countDown(); diff --git a/core/src/test/resources/indices/bwc/index-2.3.4.zip b/core/src/test/resources/indices/bwc/index-2.3.4.zip new file mode 100644 index 00000000000..2d8514724b4 Binary files /dev/null and b/core/src/test/resources/indices/bwc/index-2.3.4.zip differ diff --git a/core/src/test/resources/indices/bwc/repo-2.3.4.zip b/core/src/test/resources/indices/bwc/repo-2.3.4.zip new file mode 100644 index 00000000000..ddd92319d16 Binary files /dev/null and b/core/src/test/resources/indices/bwc/repo-2.3.4.zip differ diff --git a/core/src/test/resources/org/elasticsearch/action/bulk/simple-bulk4.json b/core/src/test/resources/org/elasticsearch/action/bulk/simple-bulk4.json index 7fc41b1d9bb..362e4a473e9 100644 --- a/core/src/test/resources/org/elasticsearch/action/bulk/simple-bulk4.json +++ 
b/core/src/test/resources/org/elasticsearch/action/bulk/simple-bulk4.json @@ -1,7 +1,7 @@ { "update" : {"_id" : "1", "_retry_on_conflict" : 2} } { "doc" : {"field" : "value"} } { "update" : { "_id" : "0", "_type" : "type1", "_index" : "index1" } } -{ "script" : "counter += param1", "lang" : "javascript", "params" : {"param1" : 1}, "upsert" : {"counter" : 1}} +{ "script" : { "inline" : "counter += param1", "lang" : "javascript", "params" : {"param1" : 1}}, "upsert" : {"counter" : 1}} { "delete" : { "_id" : "2" } } { "create" : { "_id" : "3" } } { "field1" : "value3" } diff --git a/dev-tools/create_bwc_index.py b/dev-tools/create_bwc_index.py index f73bc58554b..c96b66013b3 100644 --- a/dev-tools/create_bwc_index.py +++ b/dev-tools/create_bwc_index.py @@ -31,7 +31,6 @@ DEFAULT_HTTP_TCP_PORT = 9200 if sys.version_info[0] < 3: print('%s must use python 3.x (for the ES python client)' % sys.argv[0]) -from datetime import datetime try: from elasticsearch import Elasticsearch from elasticsearch.exceptions import ConnectionError @@ -178,9 +177,15 @@ def generate_index(client, version, index_name): logging.info('Create single shard test index') mappings = {} - if not version.startswith('2.'): - # TODO: we need better "before/onOr/after" logic in python - + warmers = {} + if parse_version(version) < parse_version('2.0.0-alpha1'): + warmers['warmer1'] = { + 'source': { + 'query': { + 'match_all': {} + } + } + } # backcompat test for legacy type level analyzer settings, see #8874 mappings['analyzer_type1'] = { 'analyzer': 'standard', @@ -219,15 +224,9 @@ def generate_index(client, version, index_name): } } mappings['meta_fields'] = { - '_id': { - 'path': 'myid' - }, '_routing': { - 'path': 'myrouting' + 'required': 'false' }, - '_boost': { - 'null_value': 2.0 - } } mappings['custom_formats'] = { 'properties': { @@ -246,34 +245,57 @@ def generate_index(client, version, index_name): 'auto_boost': True } } - - mappings['norms'] = { - 'properties': { - 'string_with_norms_disabled': { - 'type': 'string', - 'norms': { - 'enabled': False - } - }, - 'string_with_norms_enabled': { - 'type': 'string', - 'index': 'not_analyzed', - 'norms': { - 'enabled': True, - 'loading': 'eager' + if parse_version(version) < parse_version("5.0.0-alpha1"): + mappings['norms'] = { + 'properties': { + 'string_with_norms_disabled': { + 'type': 'string', + 'norms' : { + 'enabled' : False + } + }, + 'string_with_norms_enabled': { + 'type': 'string', + 'index': 'not_analyzed', + 'norms': { + 'enabled' : True, + 'loading': 'eager' + } } } } - } - mappings['doc'] = { - 'properties': { - 'string': { - 'type': 'string', - 'boost': 4 + mappings['doc'] = { + 'properties': { + 'string': { + 'type': 'string', + 'boost': 4 + } + } + } + else: # current version of the norms mapping + mappings['norms'] = { + 'properties': { + 'string_with_norms_disabled': { + 'type': 'text', + 'norms' : False + }, + 'string_with_norms_enabled': { + 'type': 'keyword', + 'index': 'not_analyzed', + 'norms': True, + 'eager_global_ordinals' : True + } + } + } + mappings['doc'] = { + 'properties': { + 'string': { + 'type': 'text', + 'boost': 4 + } } } - } settings = { 'number_of_shards': 1, @@ -284,21 +306,14 @@ def generate_index(client, version, index_name): settings['gc_deletes'] = '60000', # Same as ES default (5 GB), but missing the units to make sure they are inserted on upgrade: settings['merge.policy.max_merged_segment'] = '5368709120' - - warmers = {} - warmers['warmer1'] = { - 'source': { - 'query': { - 'match_all': {} - } - } + body = { + 'settings': 
settings, + 'mappings': mappings, } - client.indices.create(index=index_name, body={ - 'settings': settings, - 'mappings': mappings, - 'warmers': warmers - }) + if warmers: + body['warmers'] = warmers + client.indices.create(index=index_name, body=body) health = client.cluster.health(wait_for_status='green', wait_for_relocating_shards=0) assert health['timed_out'] == False, 'cluster health timed out %s' % health @@ -313,15 +328,17 @@ def generate_index(client, version, index_name): run_basic_asserts(client, index_name, 'doc', num_docs) def snapshot_index(client, version, repo_dir): + persistent = { + 'cluster.routing.allocation.exclude.version_attr': version + } + if parse_version(version) < parse_version('5.0.0-alpha1'): + # Same as ES default (30 seconds), but missing the units to make sure they are inserted on upgrade: + persistent['discovery.zen.publish_timeout'] = '30000' + # Same as ES default (512 KB), but missing the units to make sure they are inserted on upgrade: + persistent['indices.recovery.file_chunk_size'] = '524288' # Add bogus persistent settings to make sure they can be restored client.cluster.put_settings(body={ - 'persistent': { - 'cluster.routing.allocation.exclude.version_attr': version, - # Same as ES default (30 seconds), but missing the units to make sure they are inserted on upgrade: - 'discovery.zen.publish_timeout': '30000', - # Same as ES default (512 KB), but missing the units to make sure they are inserted on upgrade: - 'indices.recovery.file_chunk_size': '524288', - } + 'persistent': persistent }) client.indices.put_template(name='template_' + version.lower(), order=0, body={ "template": "te*", @@ -446,7 +463,24 @@ def shutdown_node(node): logging.info('Shutting down node with pid %d', node.pid) node.terminate() node.wait() - + +def parse_version(version): + import re + splitted = re.split('[.-]', version) + if len(splitted) == 3: + splitted = splitted + ['GA'] + splitted = [s.lower() for s in splitted] + assert len(splitted) == 4; + return splitted + +assert parse_version('5.0.0-alpha1') == parse_version('5.0.0-alpha1') +assert parse_version('5.0.0-alpha1') < parse_version('5.0.0-alpha2') +assert parse_version('5.0.0-alpha1') < parse_version('5.0.0-beta1') +assert parse_version('5.0.0-beta1') < parse_version('5.0.0') +assert parse_version('1.2.3') < parse_version('2.1.0') +assert parse_version('1.2.3') < parse_version('1.2.4') +assert parse_version('1.1.0') < parse_version('1.2.0') + def main(): logging.basicConfig(format='[%(levelname)s] [%(asctime)s] %(message)s', level=logging.INFO, datefmt='%Y-%m-%d %I:%M:%S %p') @@ -461,3 +495,4 @@ if __name__ == '__main__': main() except KeyboardInterrupt: print('Caught keyboard interrupt, exiting...') + diff --git a/distribution/build.gradle b/distribution/build.gradle index 540796fc5a3..dcd5f170fc5 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -49,7 +49,7 @@ ext.dependencyFiles = project(':core').configurations.runtime.copyRecursive() task buildModules(type: Sync) { into 'build/modules' -} +} ext.restTestExpansions = [ 'expected.modules.count': 0, @@ -82,6 +82,21 @@ project.rootProject.subprojects.findAll { it.path.startsWith(':modules:') }.each restTestExpansions['expected.modules.count'] += 1 } +// Integ tests work over the rest http layer, so we need a transport included with the integ test zip. 
+// All transport modules are included so that they may be randomized for testing +task buildTransportModules(type: Sync) { + into 'build/transport-modules' +} + +project.rootProject.subprojects.findAll { it.path.startsWith(':modules:transport-') }.each { Project transport -> + buildTransportModules { + dependsOn({ project(transport.path).bundlePlugin }) + into(transport.name) { + from { zipTree(project(transport.path).bundlePlugin.outputs.files.singleFile) } + } + } +} + // make sure we have a clean task since we aren't a java project, but we have tasks that // put stuff in the build dir task clean(type: Delete) { @@ -145,6 +160,11 @@ subprojects { from project(':distribution').buildModules } + transportModulesFiles = copySpec { + into "modules" + from project(':distribution').buildTransportModules + } + configFiles = copySpec { from '../src/main/resources/config' MavenFilteringHack.filter(it, expansions) @@ -209,6 +229,8 @@ configure(subprojects.findAll { ['zip', 'tar', 'integ-test-zip'].contains(it.nam } if (project.name != 'integ-test-zip') { with modulesFiles + } else { + with transportModulesFiles } } } diff --git a/distribution/licenses/jopt-simple-4.9.jar.sha1 b/distribution/licenses/jopt-simple-4.9.jar.sha1 deleted file mode 100644 index b86fa62ac20..00000000000 --- a/distribution/licenses/jopt-simple-4.9.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ee9e9eaa0a35360dcfeac129ff4923215fd65904 \ No newline at end of file diff --git a/distribution/licenses/jopt-simple-5.0.2.jar.sha1 b/distribution/licenses/jopt-simple-5.0.2.jar.sha1 new file mode 100644 index 00000000000..b50ed4fea3b --- /dev/null +++ b/distribution/licenses/jopt-simple-5.0.2.jar.sha1 @@ -0,0 +1 @@ +98cafc6081d5632b61be2c9e60650b64ddbc637c \ No newline at end of file diff --git a/distribution/licenses/netty-3.10.5.Final.jar.sha1 b/distribution/licenses/netty-3.10.5.Final.jar.sha1 deleted file mode 100644 index 6f190752e9e..00000000000 --- a/distribution/licenses/netty-3.10.5.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9ca7d55d246092bddd29b867706e2f6c7db701a0 diff --git a/distribution/licenses/securesm-1.0.jar.sha1 b/distribution/licenses/securesm-1.0.jar.sha1 deleted file mode 100644 index 96d45d93e66..00000000000 --- a/distribution/licenses/securesm-1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c0c6cf986ba0057390bfcc80c366a0e3157f944b diff --git a/distribution/licenses/securesm-1.1.jar.sha1 b/distribution/licenses/securesm-1.1.jar.sha1 new file mode 100644 index 00000000000..9144a082b61 --- /dev/null +++ b/distribution/licenses/securesm-1.1.jar.sha1 @@ -0,0 +1 @@ +1e423447d020041534be94c0f31a49fbdc1f2950 \ No newline at end of file diff --git a/distribution/src/main/packaging/scripts/postrm b/distribution/src/main/packaging/scripts/postrm index 2fa42678d96..19c57eafa2b 100644 --- a/distribution/src/main/packaging/scripts/postrm +++ b/distribution/src/main/packaging/scripts/postrm @@ -13,7 +13,6 @@ SOURCE_ENV_FILE=true REMOVE_DIRS=false -REMOVE_SERVICE=false REMOVE_USER_AND_GROUP=false case "$1" in @@ -21,7 +20,6 @@ case "$1" in # Debian #################################################### remove) REMOVE_DIRS=true - REMOVE_SERVICE=true ;; purge) @@ -34,7 +32,6 @@ case "$1" in # RedHat #################################################### 0) REMOVE_DIRS=true - REMOVE_SERVICE=true REMOVE_USER_AND_GROUP=true ;; 1) @@ -65,20 +62,6 @@ if [ "$SOURCE_ENV_FILE" = "true" ]; then fi fi -if [ "$REMOVE_SERVICE" = "true" ]; then - if command -v systemctl >/dev/null; then - systemctl disable elasticsearch.service > /dev/null 2>&1 || true - fi - 
- if command -v chkconfig >/dev/null; then - chkconfig --del elasticsearch 2> /dev/null || true - fi - - if command -v update-rc.d >/dev/null; then - update-rc.d elasticsearch remove >/dev/null || true - fi -fi - if [ "$REMOVE_DIRS" = "true" ]; then if [ -d "$LOG_DIR" ]; then diff --git a/distribution/src/main/packaging/scripts/prerm b/distribution/src/main/packaging/scripts/prerm index 79523629f77..592f2f9948f 100644 --- a/distribution/src/main/packaging/scripts/prerm +++ b/distribution/src/main/packaging/scripts/prerm @@ -12,12 +12,14 @@ STOP_REQUIRED=false +REMOVE_SERVICE=false case "$1" in # Debian #################################################### remove) STOP_REQUIRED=true + REMOVE_SERVICE=true ;; upgrade) if [ "$RESTART_ON_UPGRADE" = "true" ]; then @@ -30,6 +32,7 @@ case "$1" in # RedHat #################################################### 0) STOP_REQUIRED=true + REMOVE_SERVICE=true ;; 1) # Don't do anything on upgrade, because the preun script in redhat gets executed after the postinst (madness!) @@ -64,6 +67,20 @@ if [ "$STOP_REQUIRED" = "true" ]; then echo " OK" fi +if [ "$REMOVE_SERVICE" = "true" ]; then + if command -v systemctl >/dev/null; then + systemctl disable elasticsearch.service > /dev/null 2>&1 || true + fi + + if command -v chkconfig >/dev/null; then + chkconfig --del elasticsearch 2> /dev/null || true + fi + + if command -v update-rc.d >/dev/null; then + update-rc.d elasticsearch remove >/dev/null || true + fi +fi + SCRIPTS_DIR="/etc/elasticsearch/scripts" # delete the scripts directory if and only if empty if [ -d "$SCRIPTS_DIR" ]; then diff --git a/docs/build.gradle b/docs/build.gradle index 660755a1c65..26560ce064a 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -30,20 +30,24 @@ integTest { configFile 'scripts/my_script.py' configFile 'userdict_ja.txt' configFile 'KeywordTokenizer.rbbi' + // Whitelist reindexing from the local node so we can test it. + setting 'reindex.remote.whitelist', 'myself' } } // Build the cluster with all plugins + project.rootProject.subprojects.findAll { it.parent.path == ':plugins' }.each { subproj -> /* Skip repositories. We just aren't going to be able to test them so it * doesn't make sense to waste time installing them. */ if (subproj.path.startsWith(':plugins:repository-')) { return } - integTest { - cluster { - // We need a non-decorated project object, so we lookup the project by path - plugin subproj.name, project(subproj.path) + subproj.afterEvaluate { // need to wait until the project has been configured + integTest { + cluster { + plugin subproj.path + } } } } @@ -81,3 +85,15 @@ Closure setupTwitter = { String name, int count -> } setupTwitter('twitter', 5) setupTwitter('big_twitter', 120) + +buildRestTests.setups['host'] = ''' + # Fetch the http host. We use the host of the master because we know there will always be a master. + - do: + cluster.state: {} + - set: { master_node: master } + - do: + nodes.info: + metric: [ http ] + - is_true: nodes.$master.http.publish_address + - set: {nodes.$master.http.publish_address: host} +''' diff --git a/docs/community-clients/index.asciidoc b/docs/community-clients/index.asciidoc index 6ee3398fb06..707d976536b 100644 --- a/docs/community-clients/index.asciidoc +++ b/docs/community-clients/index.asciidoc @@ -84,6 +84,8 @@ See the {client}/groovy-api/current/index.html[official Elasticsearch Groovy cli Also see the {client}/java-api/current/index.html[official Elasticsearch Java client].
+* https://github.com/otto-de/flummi[Flummi]: + Java Rest client with comprehensive query DSL API * https://github.com/searchbox-io/Jest[Jest]: Java Rest client. diff --git a/docs/java-api/client.asciidoc b/docs/java-api/client.asciidoc index bb254e8b356..65b16052376 100644 --- a/docs/java-api/client.asciidoc +++ b/docs/java-api/client.asciidoc @@ -17,7 +17,7 @@ that connects to a cluster. The client must have the same major version (e.g. `2.x`, or `5.x`) as the nodes in the cluster. Clients may connect to clusters which have a different -minor version (e.g. `2.3.x`) but it is possible that new funcionality may not +minor version (e.g. `2.3.x`) but it is possible that new functionality may not be supported. Ideally, the client should have the same version as the cluster. @@ -37,7 +37,7 @@ be "two hop" operations). -------------------------------------------------- // on startup -TransportClient client = TransportClient.builder().build() +TransportClient client = new PreBuiltTransportClient(Settings.EMPTY) .addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName("host1"), 9300)) .addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName("host2"), 9300)); @@ -53,7 +53,7 @@ Note that you have to set the cluster name if you use one different than -------------------------------------------------- Settings settings = Settings.builder() .put("cluster.name", "myClusterName").build(); -TransportClient client = TransportClient.builder().settings(settings).build(); +TransportClient client = new PreBuiltTransportClient(settings); //Add transport addresses and do something with the client... -------------------------------------------------- @@ -79,7 +79,7 @@ In order to enable sniffing, set `client.transport.sniff` to `true`: -------------------------------------------------- Settings settings = Settings.settingsBuilder() .put("client.transport.sniff", true).build(); -TransportClient client = TransportClient.builder().settings(settings).build(); +TransportClient client = new PreBuiltTransportClient(settings); -------------------------------------------------- Other transport client level settings include: diff --git a/docs/java-api/query-dsl/template-query.asciidoc b/docs/java-api/query-dsl/template-query.asciidoc index af950672e49..dc31d9087e8 100644 --- a/docs/java-api/query-dsl/template-query.asciidoc +++ b/docs/java-api/query-dsl/template-query.asciidoc @@ -3,6 +3,21 @@ See {ref}/search-template.html[Search Template] documentation +In order to use the `template` query from the Java API +the lang-mustache module dependency should be on the classpath and +the transport client should be loaded with the lang-mustache plugin: + +[source,java] +-------------------------------------------------- +TransportClient transportClient = TransportClient.builder() + .settings(Settings.builder().put("node.name", "node")) + .addPlugin(MustachePlugin.class) + .build(); +transportClient.addTransportAddress( + new InetSocketTransportAddress(new InetSocketAddress(InetAddresses.forString("127.0.0.1"), 9300)) +); +-------------------------------------------------- + Define your template parameters as a `Map`: [source,java] -------------------------------------------------- @@ -31,7 +46,7 @@ Define your template query: [source,java] -------------------------------------------------- -QueryBuilder qb = templateQuery( +QueryBuilder qb = new TemplateQueryBuilder( "gender_template", <1> ScriptService.ScriptType.FILE, <2> template_params); <3> @@ -40,11 +55,14 @@ QueryBuilder qb = templateQuery( <2> template stored on disk in
`gender_template.mustache` <3> parameters -You can also store your template in a special index named `.scripts`: +You can also store your template in the cluster state: [source,java] -------------------------------------------------- -client.preparePutIndexedScript("mustache", "template_gender", +client.admin().cluster().preparePutStoredScript() + .setScriptLang("mustache") + .setId("template_gender") + .setSource(new BytesArray( "{\n" + " \"template\" : {\n" + " \"query\" : {\n" + @@ -53,19 +71,19 @@ client.preparePutIndexedScript("mustache", "template_gender", " }\n" + " }\n" + " }\n" + - "}").get(); + "}")).get(); -------------------------------------------------- -To execute an indexed templates, use `ScriptService.ScriptType.INDEXED`: +To execute a stored template, use `ScriptService.ScriptType.STORED`: [source,java] -------------------------------------------------- -QueryBuilder qb = templateQuery( +QueryBuilder qb = new TemplateQueryBuilder( "gender_template", <1> - ScriptType.INDEXED, <2> + ScriptType.STORED, <2> template_params); <3> -------------------------------------------------- <1> template name -<2> template stored in an index +<2> template stored in the cluster state <3> parameters diff --git a/docs/plugins/analysis-icu.asciidoc b/docs/plugins/analysis-icu.asciidoc index 3dbe7b2e354..a21e0c5c82f 100644 --- a/docs/plugins/analysis-icu.asciidoc +++ b/docs/plugins/analysis-icu.asciidoc @@ -161,8 +161,6 @@ PUT icu_sample } } -GET _cluster/health?wait_for_status=yellow - POST icu_sample/_analyze?analyzer=my_analyzer&text=Elasticsearch. Wow! -------------------------------------------------- // CONSOLE @@ -360,8 +358,6 @@ PUT /my_index } } -GET _cluster/health?wait_for_status=yellow - GET _search <3> { "query": { @@ -481,8 +477,6 @@ PUT icu_sample } } -GET _cluster/health?wait_for_status=yellow - GET icu_sample/_analyze?analyzer=latin { "text": "你好" <2> diff --git a/docs/plugins/analysis-kuromoji.asciidoc b/docs/plugins/analysis-kuromoji.asciidoc index b0d6c1eb962..d7b357cb455 100644 --- a/docs/plugins/analysis-kuromoji.asciidoc +++ b/docs/plugins/analysis-kuromoji.asciidoc @@ -172,8 +172,6 @@ PUT kuromoji_sample } } -GET _cluster/health?wait_for_status=yellow - POST kuromoji_sample/_analyze?analyzer=my_analyzer&text=東京スカイツリー -------------------------------------------------- // CONSOLE @@ -226,8 +224,6 @@ PUT kuromoji_sample } } -GET _cluster/health?wait_for_status=yellow - POST kuromoji_sample/_analyze?analyzer=my_analyzer&text=飲み -------------------------------------------------- // CONSOLE @@ -286,8 +282,6 @@ PUT kuromoji_sample } } -GET _cluster/health?wait_for_status=yellow - POST kuromoji_sample/_analyze?analyzer=my_analyzer&text=寿司がおいしいね -------------------------------------------------- @@ -360,8 +354,6 @@ PUT kuromoji_sample } } -GET _cluster/health?wait_for_status=yellow - POST kuromoji_sample/_analyze?analyzer=katakana_analyzer&text=寿司 <1> POST kuromoji_sample/_analyze?analyzer=romaji_analyzer&text=寿司 <2> @@ -413,8 +405,6 @@ PUT kuromoji_sample } } -GET _cluster/health?wait_for_status=yellow - POST kuromoji_sample/_analyze?analyzer=my_analyzer&text=コピー <1> POST kuromoji_sample/_analyze?analyzer=my_analyzer&text=サーバー <2> @@ -464,8 +454,6 @@ PUT kuromoji_sample } } -GET _cluster/health?wait_for_status=yellow - POST kuromoji_sample/_analyze?analyzer=analyzer_with_ja_stop&text=ストップは消える -------------------------------------------------- // CONSOLE @@ -512,8 +500,6 @@ PUT kuromoji_sample } } -GET _cluster/health?wait_for_status=yellow - POST
kuromoji_sample/_analyze?analyzer=my_analyzer&text=一〇〇〇 -------------------------------------------------- diff --git a/docs/plugins/analysis-phonetic.asciidoc b/docs/plugins/analysis-phonetic.asciidoc index b44218b7521..34f14abe3c5 100644 --- a/docs/plugins/analysis-phonetic.asciidoc +++ b/docs/plugins/analysis-phonetic.asciidoc @@ -79,8 +79,6 @@ PUT phonetic_sample } } -GET _cluster/health?wait_for_status=yellow - POST phonetic_sample/_analyze?analyzer=my_analyzer&text=Joe Bloggs <1> -------------------------------------------------- // CONSOLE diff --git a/docs/plugins/ingest-user-agent.asciidoc b/docs/plugins/ingest-user-agent.asciidoc new file mode 100644 index 00000000000..1110a0d859c --- /dev/null +++ b/docs/plugins/ingest-user-agent.asciidoc @@ -0,0 +1,74 @@ +[[ingest-user-agent]] +=== Ingest user agent processor plugin + +The `user_agent` processor extracts details from the user agent string a browser sends with its web requests. +This processor adds this information by default under the `user_agent` field. + +The ingest-user-agent plugin ships by default with the regexes.yaml made available by uap-java with an Apache 2.0 license. For more details see https://github.com/ua-parser/uap-core. + +[[ingest-user-agent-install]] +[float] +==== Installation + +This plugin can be installed using the plugin manager: + +[source,sh] +---------------------------------------------------------------- +sudo bin/elasticsearch-plugin install ingest-user-agent +---------------------------------------------------------------- + +The plugin must be installed on every node in the cluster, and each node must +be restarted after installation. + +[[ingest-user-agent-remove]] +[float] +==== Removal + +The plugin can be removed with the following command: + +[source,sh] +---------------------------------------------------------------- +sudo bin/elasticsearch-plugin remove ingest-user-agent +---------------------------------------------------------------- + +The node must be stopped before removing the plugin. + +[[using-ingest-user-agent]] +==== Using the user_agent Processor in a Pipeline + +[[ingest-user-agent-options]] +.User-agent options +[options="header"] +|====== +| Name | Required | Default | Description +| `field` | yes | - | The field containing the user agent string. +| `target_field` | no | user_agent | The field that will be filled with the user agent details. +| `regex_file` | no | - | The name of the file in the `config/ingest-user-agent` directory containing the regular expressions for parsing the user agent string. Both the directory and the file have to be created before starting Elasticsearch. If not specified, ingest-user-agent will use the regexes.yaml from uap-core it ships with (see below). +| `properties` | no | [`name`, `major`, `minor`, `patch`, `build`, `os`, `os_name`, `os_major`, `os_minor`, `device`] | Controls what properties are added to `target_field`. +|====== + +Here is an example that adds the user agent details to the `user_agent` field based on the `agent` field: + +[source,js] +-------------------------------------------------- +{ + "description" : "...", + "processors" : [ + { + "user_agent" : { + "field" : "agent" + } + } + ] +} +-------------------------------------------------- + +===== Using a custom regex file +To use a custom regex file for parsing the user agents, that file has to be put into the `config/ingest-user-agent` directory and +has to have a `.yaml` filename extension. 
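For illustration, here is a minimal pipeline sketch; the file name `custom_regexes.yaml` is a hypothetical example, assumed to have already been placed in `config/ingest-user-agent`:

[source,js]
--------------------------------------------------
{
  "description" : "...",
  "processors" : [
    {
      "user_agent" : {
        "field" : "agent",
        "regex_file" : "custom_regexes.yaml"
      }
    }
  ]
}
--------------------------------------------------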
The file has to be present at node startup; any changes to it or any new files added +while the node is running will not have any effect. + +In practice, it will make most sense for any custom regex file to be a variant of the default file, either a more recent version +or a customised version. + +The default file included in `ingest-user-agent` is the `regexes.yaml` from uap-core: https://github.com/ua-parser/uap-core/blob/master/regexes.yaml diff --git a/docs/plugins/ingest.asciidoc b/docs/plugins/ingest.asciidoc index 7488cab6565..490d3d1362b 100644 --- a/docs/plugins/ingest.asciidoc +++ b/docs/plugins/ingest.asciidoc @@ -19,8 +19,14 @@ The GeoIP processor adds information about the geographical location of IP addre This processor adds this information by default under the `geoip` field. + The ingest-geoip plugin ships by default with the GeoLite2 City and GeoLite2 Country geoip2 databases from Maxmind made available -under the CCA-ShareAlike 3.0 license. For more details see, http://dev.maxmind.com/geoip/geoip2/geolite2/. +under the CCA-ShareAlike 3.0 license. For more details, see http://dev.maxmind.com/geoip/geoip2/geolite2/. + +<>:: + +A processor that extracts details from the User-Agent header value. include::ingest-attachment.asciidoc[] -include::ingest-geoip.asciidoc[] \ No newline at end of file +include::ingest-geoip.asciidoc[] + +include::ingest-user-agent.asciidoc[] \ No newline at end of file diff --git a/docs/plugins/mapper-attachments.asciidoc b/docs/plugins/mapper-attachments.asciidoc index b287decd4cf..f2c034a317e 100644 --- a/docs/plugins/mapper-attachments.asciidoc +++ b/docs/plugins/mapper-attachments.asciidoc @@ -246,7 +246,7 @@ PUT /test/person/1?refresh=true } GET /test/person/_search { - "fields": [ "file.content_type" ], + "stored_fields": [ "file.content_type" ], "query": { "match": { "file.content_type": "text plain" @@ -367,7 +367,7 @@ PUT /test/person/1?refresh=true } GET /test/person/_search { - "fields": [], + "stored_fields": [], "query": { "match": { "file.content": "king queen" diff --git a/docs/plugins/mapper-size.asciidoc b/docs/plugins/mapper-size.asciidoc index 800a640890a..df16d7eb857 100644 --- a/docs/plugins/mapper-size.asciidoc +++ b/docs/plugins/mapper-size.asciidoc @@ -52,7 +52,8 @@ PUT my_index -------------------------- // CONSOLE -The value of the `_size` field is accessible in queries: +The value of the `_size` field is accessible in queries, aggregations, scripts, +and when sorting: [source,js] -------------------------- @@ -75,6 +76,26 @@ GET my_index/_search "gt": 10 } } + }, + "aggs": { + "sizes": { + "terms": { + "field": "_size", <2> + "size": 10 + } + } + }, + "sort": [ + { + "_size": { <3> + "order": "desc" + } + } + ], + "script_fields": { + "size": { + "script": "doc['_size']" <4> + } } } -------------------------- @@ -82,3 +103,7 @@ GET my_index/_search // TEST[continued] <1> Querying on the `_size` field +<2> Aggregating on the `_size` field +<3> Sorting on the `_size` field +<4> Accessing the `_size` field in scripts (inline scripts must be {ref}/modules-security-scripting.html#enable-dynamic-scripting[enabled] for this example to work) + diff --git a/docs/plugins/repository-hdfs.asciidoc b/docs/plugins/repository-hdfs.asciidoc index e91532e8af5..62b1d2a95ca 100644 --- a/docs/plugins/repository-hdfs.asciidoc +++ b/docs/plugins/repository-hdfs.asciidoc @@ -46,20 +46,62 @@ plugin folder and point `HADOOP_HOME` variable to it; this should minimize the a [[repository-hdfs-config]] ==== Configuration Properties -Once installed, define
the configuration for the `hdfs` repository through `elasticsearch.yml` or the +Once installed, define the configuration for the `hdfs` repository through the {ref}/modules-snapshots.html[REST API]: +[source,js] +---- +PUT _snapshot/my_hdfs_repository +{ + "type": "hdfs", + "settings": { + "uri": "hdfs://namenode:8020/", + "path": "elasticsearch/repositories/my_hdfs_repository", + "conf.dfs.client.read.shortcircuit": "true" + } +} +---- +// CONSOLE +// TEST[skip:we don't have hdfs set up while testing this] + +The following settings are supported: + +[horizontal] +`uri`:: + + The uri address for hdfs. ex: "hdfs://<host>:<port>/". (Required) + +`path`:: + + The file path within the filesystem where data is stored/loaded. ex: "path/to/file". (Required) + +`load_defaults`:: + + Whether to load the default Hadoop configuration or not. (Enabled by default) + +`conf.<key>`:: + + Inlined configuration parameter to be added to Hadoop configuration. (Optional) + Only client oriented properties from the hadoop http://hadoop.apache.org/docs/current/hadoop-project-dist/hadoop-common/core-default.xml[core] and http://hadoop.apache.org/docs/current/hadoop-project-dist/hadoop-hdfs/hdfs-default.xml[hdfs] configuration files will be recognized by the plugin. + +`compress`:: + + Whether to compress the metadata or not. (Disabled by default) + +`chunk_size`:: + + Override the chunk size. (Disabled by default) + + +Alternatively, you can define the `hdfs` repository and its settings in your `elasticsearch.yml`: [source,yaml] ---- -repositories +repositories: hdfs: uri: "hdfs://<host>:<port>/" \# required - HDFS address only path: "some/path" \# required - path within the file-system where data is stored/loaded load_defaults: "true" \# optional - whether to load the default Hadoop configuration (default) or not - conf_location: "extra-cfg.xml" \# optional - Hadoop configuration XML to be loaded (use commas for multi values) conf.<key> : "<value>" \# optional - 'inlined' key=value added to the Hadoop configuration - concurrent_streams: 5 \# optional - the number of concurrent streams (defaults to 5) compress: "false" \# optional - whether to compress the metadata or not (default) chunk_size: "10mb" \# optional - chunk size (disabled by default) - ---- diff --git a/docs/plugins/repository-s3.asciidoc b/docs/plugins/repository-s3.asciidoc index e1f07f6c8e2..90b0c803996 100644 --- a/docs/plugins/repository-s3.asciidoc +++ b/docs/plugins/repository-s3.asciidoc @@ -242,6 +242,12 @@ The following settings are supported: currently supported by the plugin. For more information about the different classes, see http://docs.aws.amazon.com/AmazonS3/latest/dev/storage-class-intro.html[AWS Storage Classes Guide] +`path_style_access`:: + + Activate path style access for http://docs.aws.amazon.com/AmazonS3/latest/dev/VirtualHosting.html[virtual hosting of buckets]. + The default behaviour is to detect which access style to use based on the configured endpoint (an IP will result + in path-style access) and the bucket being accessed (some buckets are not valid DNS names). + Note that you can define S3 repository settings for all S3 repositories in `elasticsearch.yml` configuration file. They are all prefixed with `repositories.s3.`. For example, you can define compression for all S3 repositories by setting `repositories.s3.compress: true` in `elasticsearch.yml`.
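As a sketch only (the repository and bucket names are invented for illustration), an S3 repository that forces path style access might be registered like this:

[source,js]
----
PUT _snapshot/my_s3_repository
{
  "type": "s3",
  "settings": {
    "bucket": "my-bucket",
    "path_style_access": true
  }
}
----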
diff --git a/docs/reference/aggregations/bucket/terms-aggregation.asciidoc b/docs/reference/aggregations/bucket/terms-aggregation.asciidoc index 3c1f4ae860a..959b93611d8 100644 --- a/docs/reference/aggregations/bucket/terms-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/terms-aggregation.asciidoc @@ -9,8 +9,8 @@ Example: -------------------------------------------------- { "aggs" : { - "genders" : { - "terms" : { "field" : "gender" } + "genres" : { + "terms" : { "field" : "genre" } } } } @@ -24,16 +24,20 @@ Response: ... "aggregations" : { - "genders" : { + "genres" : { "doc_count_error_upper_bound": 0, <1> "sum_other_doc_count": 0, <2> "buckets" : [ <3> { - "key" : "male", + "key" : "jazz", "doc_count" : 10 }, { - "key" : "female", + "key" : "rock", + "doc_count" : 10 + }, + { + "key" : "electronic", "doc_count" : 10 } ] @@ -247,9 +251,9 @@ Ordering the buckets by their `doc_count` in an ascending manner: -------------------------------------------------- { "aggs" : { - "genders" : { + "genres" : { "terms" : { - "field" : "gender", + "field" : "genre", "order" : { "_count" : "asc" } } } @@ -263,9 +267,9 @@ Ordering the buckets alphabetically by their terms in an ascending manner: -------------------------------------------------- { "aggs" : { - "genders" : { + "genres" : { "terms" : { - "field" : "gender", + "field" : "genre", "order" : { "_term" : "asc" } } } @@ -280,13 +284,13 @@ Ordering the buckets by single value metrics sub-aggregation (identified by the -------------------------------------------------- { "aggs" : { - "genders" : { + "genres" : { "terms" : { - "field" : "gender", - "order" : { "avg_height" : "desc" } + "field" : "genre", + "order" : { "avg_play_count" : "desc" } }, "aggs" : { - "avg_height" : { "avg" : { "field" : "height" } } + "avg_play_count" : { "avg" : { "field" : "play_count" } } } } } @@ -299,13 +303,13 @@ Ordering the buckets by multi value metrics sub-aggregation (identified by the a -------------------------------------------------- { "aggs" : { - "genders" : { + "genres" : { "terms" : { - "field" : "gender", - "order" : { "height_stats.avg" : "desc" } + "field" : "genre", + "order" : { "playback_stats.avg" : "desc" } }, "aggs" : { - "height_stats" : { "stats" : { "field" : "height" } } + "playback_stats" : { "stats" : { "field" : "play_count" } } } } } @@ -343,14 +347,14 @@ PATH := <AGG_NAME>[<AGG_SEPARATOR><AGG_NAME>]*[<METRIC_SEPARATOR><METRIC>] - "field" : "address.country", - "order" : { "females>height_stats.avg" : "desc" } + "field" : "artist.country", + "order" : { "rock>playback_stats.avg" : "desc" } }, "aggs" : { - "females" : { - "filter" : { "term" : { "gender" : "female" }}, + "rock" : { + "filter" : { "term" : { "genre" : "rock" }}, "aggs" : { - "height_stats" : { "stats" : { "field" : "height" }} + "playback_stats" : { "stats" : { "field" : "play_count" }} } } } @@ -359,7 +363,7 @@ PATH := <AGG_NAME>[<AGG_SEPARATOR><AGG_NAME>]*[<METRIC_SEPARATOR><METRIC>] - "field" : "address.country", - "order" : [ { "females>height_stats.avg" : "desc" }, { "_count" : "desc" } ] + "field" : "artist.country", + "order" : [ { "rock>playback_stats.avg" : "desc" }, { "_count" : "desc" } ] }, "aggs" : { - "females" : { - "filter" : { "term" : { "gender" : { "female" }}}, + "rock" : { + "filter" : { "term" : { "genre" : "rock" }}, "aggs" : { - "height_stats" : { "stats" : { "field" : "height" }} + "playback_stats" : { "stats" : { "field" : "play_count" }} } } } @@ -385,7 +389,7 @@ Multiple criteria can be used to order the buckets by providing an array of orde } -------------------------------------------------- -The above will sort the countries buckets based on the average height among the female population and then by +The above will sort the artist's countries buckets
based on the average play count among the rock songs and then by their `doc_count` in descending order. NOTE: In the event that two buckets share the same values for all order criteria the bucket's term value is used as a @@ -439,10 +443,10 @@ Generating the terms using a script: -------------------------------------------------- { "aggs" : { - "genders" : { + "genres" : { "terms" : { "script" : { - "inline": "doc['gender'].value" + "inline": "doc['genre'].value", "lang": "painless" } } @@ -457,12 +461,12 @@ This will interpret the `script` parameter as an `inline` script with the defaul -------------------------------------------------- { "aggs" : { - "genders" : { + "genres" : { "terms" : { "script" : { "file": "my_script", "params": { - "field": "gender" + "field": "genre" } } } @@ -480,11 +484,11 @@ TIP: for indexed scripts replace the `file` parameter with an `id` parameter. -------------------------------------------------- { "aggs" : { - "genders" : { + "genres" : { "terms" : { - "field" : "gender", + "field" : "genre", "script" : { - "inline" : "'Gender: ' +_value" + "inline" : "'Genre: ' +_value", "lang" : "painless" } } diff --git a/docs/reference/aggregations/metrics/cardinality-aggregation.asciidoc b/docs/reference/aggregations/metrics/cardinality-aggregation.asciidoc index 9c4ee59cccf..b8ee0508618 100644 --- a/docs/reference/aggregations/metrics/cardinality-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/cardinality-aggregation.asciidoc @@ -45,9 +45,7 @@ experimental[The `precision_threshold` option is specific to the current interna defines a unique count below which counts are expected to be close to accurate. Above this value, counts might become a bit more fuzzy. The maximum supported value is 40000, thresholds above this number will have the same -effect as a threshold of 40000. -Default value depends on the number of parent aggregations that multiple -create buckets (such as terms or histograms). +effect as a threshold of 40000. The default value is +3000+. ==== Counts are approximate @@ -71,11 +69,70 @@ about `c * 8` bytes.
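A minimal sketch of setting the option (the index and field names here are assumptions for illustration):

[source,js]
--------------------------------------------------
POST /sales/_search?size=0
{
    "aggs" : {
        "genre_count" : {
            "cardinality" : {
                "field" : "genre",
                "precision_threshold" : 100
            }
        }
    }
}
--------------------------------------------------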
The following chart shows how the error varies before and after the threshold: +//// +To generate this chart use this gnuplot script: +------- +#!/usr/bin/gnuplot +reset +set terminal png size 1000,400 + +set xlabel "Actual cardinality" +set logscale x + +set ylabel "Relative error (%)" +set yrange [0:8] + +set title "Cardinality error" +set grid + +set style data lines + +plot "test.dat" using 1:2 title "threshold=100", \ +"" using 1:3 title "threshold=1000", \ +"" using 1:4 title "threshold=10000" +# +------- + +and generate data in a 'test.dat' file using the below Java code: + +------- +private static double error(HyperLogLogPlusPlus h, long expected) { + double actual = h.cardinality(0); + return Math.abs(expected - actual) / expected; +} + +public static void main(String[] args) { + HyperLogLogPlusPlus h100 = new HyperLogLogPlusPlus(precisionFromThreshold(100), BigArrays.NON_RECYCLING_INSTANCE, 1); + HyperLogLogPlusPlus h1000 = new HyperLogLogPlusPlus(precisionFromThreshold(1000), BigArrays.NON_RECYCLING_INSTANCE, 1); + HyperLogLogPlusPlus h10000 = new HyperLogLogPlusPlus(precisionFromThreshold(10000), BigArrays.NON_RECYCLING_INSTANCE, 1); + + int next = 100; + int step = 10; + + for (int i = 1; i <= 10000000; ++i) { + long h = BitMixer.mix64(i); + h100.collect(0, h); + h1000.collect(0, h); + h10000.collect(0, h); + + if (i == next) { + System.out.println(i + " " + error(h100, i)*100 + " " + error(h1000, i)*100 + " " + error(h10000, i)*100); + next += step; + if (next >= 100 * step) { + step *= 10; + } + } + } +} +------- + +//// + image:images/cardinality_error.png[] For all 3 thresholds, counts have been accurate up to the configured threshold (although not guaranteed, this is likely to be the case). Please also note that -even with a threshold as low as 100, the error remains under 5%, even when +even with a threshold as low as 100, the error remains very low, even when counting millions of items. 
==== Pre-computed hashes diff --git a/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc b/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc index 83855a8aae5..1b955d2a898 100644 --- a/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc @@ -22,7 +22,7 @@ The top_hits aggregation returns regular search hits, because of this many per h * <> * <> * <> -* <> +* <> * <> ==== Example diff --git a/docs/reference/analysis/analyzers/configuring.asciidoc b/docs/reference/analysis/analyzers/configuring.asciidoc index 2ce13702e00..d4c606df24d 100644 --- a/docs/reference/analysis/analyzers/configuring.asciidoc +++ b/docs/reference/analysis/analyzers/configuring.asciidoc @@ -38,8 +38,6 @@ PUT my_index } } -GET _cluster/health?wait_for_status=yellow - POST my_index/_analyze { "field": "my_text", <2> @@ -98,4 +96,3 @@ POST my_index/_analyze // TESTRESPONSE ///////////////////// - diff --git a/docs/reference/analysis/analyzers/custom-analyzer.asciidoc b/docs/reference/analysis/analyzers/custom-analyzer.asciidoc index 1707a9a399b..f14759856dd 100644 --- a/docs/reference/analysis/analyzers/custom-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/custom-analyzer.asciidoc @@ -74,8 +74,6 @@ PUT my_index } } -GET _cluster/health?wait_for_status=yellow - POST my_index/_analyze { "analyzer": "my_custom_analyzer", @@ -196,8 +194,6 @@ PUT my_index } } -GET _cluster/health?wait_for_status=yellow - POST my_index/_analyze { "analyzer": "my_custom_analyzer", @@ -258,4 +254,3 @@ The above example produces the following terms: --------------------------- [ i'm, _happy_, person, you ] --------------------------- - diff --git a/docs/reference/analysis/analyzers/fingerprint-analyzer.asciidoc b/docs/reference/analysis/analyzers/fingerprint-analyzer.asciidoc index 24dc92380bb..c917cc7aa03 100644 --- a/docs/reference/analysis/analyzers/fingerprint-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/fingerprint-analyzer.asciidoc @@ -114,8 +114,6 @@ PUT my_index } } -GET _cluster/health?wait_for_status=yellow - POST my_index/_analyze { "analyzer": "my_fingerprint_analyzer", diff --git a/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc b/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc index bd6000c3de7..448f5289d54 100644 --- a/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc @@ -195,8 +195,6 @@ PUT my_index } } -GET _cluster/health?wait_for_status=yellow - POST my_index/_analyze { "analyzer": "my_email_analyzer", @@ -285,8 +283,6 @@ PUT my_index } } -GET _cluster/health?wait_for_status=yellow - GET my_index/_analyze { "analyzer": "camel", diff --git a/docs/reference/analysis/analyzers/standard-analyzer.asciidoc b/docs/reference/analysis/analyzers/standard-analyzer.asciidoc index 3b948892483..bc49be50792 100644 --- a/docs/reference/analysis/analyzers/standard-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/standard-analyzer.asciidoc @@ -179,8 +179,6 @@ PUT my_index } } -GET _cluster/health?wait_for_status=yellow - POST my_index/_analyze { "analyzer": "my_english_analyzer", @@ -278,4 +276,3 @@ The above example produces the following terms: --------------------------- [ 2, quick, brown, foxes, jumpe, d, over, lazy, dog's, bone ] --------------------------- - diff --git a/docs/reference/analysis/analyzers/stop-analyzer.asciidoc b/docs/reference/analysis/analyzers/stop-analyzer.asciidoc index 
e40436342d7..31cb07ce706 100644 --- a/docs/reference/analysis/analyzers/stop-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/stop-analyzer.asciidoc @@ -154,8 +154,6 @@ PUT my_index } } -GET _cluster/health?wait_for_status=yellow - POST my_index/_analyze { "analyzer": "my_stop_analyzer", @@ -240,5 +238,3 @@ The above example produces the following terms: --------------------------- [ quick, brown, foxes, jumped, lazy, dog, s, bone ] --------------------------- - - diff --git a/docs/reference/analysis/charfilters/htmlstrip-charfilter.asciidoc b/docs/reference/analysis/charfilters/htmlstrip-charfilter.asciidoc index 3d8b187d772..217b618c9c2 100644 --- a/docs/reference/analysis/charfilters/htmlstrip-charfilter.asciidoc +++ b/docs/reference/analysis/charfilters/htmlstrip-charfilter.asciidoc @@ -93,8 +93,6 @@ PUT my_index } } -GET _cluster/health?wait_for_status=yellow - POST my_index/_analyze { "analyzer": "my_analyzer", @@ -130,6 +128,3 @@ The above example produces the following term: --------------------------- [ \nI'm so happy!\n ] --------------------------- - - - diff --git a/docs/reference/analysis/charfilters/mapping-charfilter.asciidoc b/docs/reference/analysis/charfilters/mapping-charfilter.asciidoc index ed90e9f6ab6..30e565d443a 100644 --- a/docs/reference/analysis/charfilters/mapping-charfilter.asciidoc +++ b/docs/reference/analysis/charfilters/mapping-charfilter.asciidoc @@ -66,8 +66,6 @@ PUT my_index } } -GET _cluster/health?wait_for_status=yellow - POST my_index/_analyze { "analyzer": "my_analyzer", @@ -134,8 +132,6 @@ PUT my_index } } -GET _cluster/health?wait_for_status=yellow - POST my_index/_analyze { "analyzer": "my_analyzer", diff --git a/docs/reference/analysis/charfilters/pattern-replace-charfilter.asciidoc b/docs/reference/analysis/charfilters/pattern-replace-charfilter.asciidoc index 9299eb800b5..3f4bf9aa05a 100644 --- a/docs/reference/analysis/charfilters/pattern-replace-charfilter.asciidoc +++ b/docs/reference/analysis/charfilters/pattern-replace-charfilter.asciidoc @@ -58,8 +58,6 @@ PUT my_index } } -GET _cluster/health?wait_for_status=yellow - POST my_index/_analyze { "analyzer": "my_analyzer", @@ -123,8 +121,6 @@ PUT my_index } } -GET _cluster/health?wait_for_status=yellow - POST my_index/_analyze { "analyzer": "my_analyzer", diff --git a/docs/reference/analysis/testing.asciidoc b/docs/reference/analysis/testing.asciidoc index ab50f6564bb..0a603973e18 100644 --- a/docs/reference/analysis/testing.asciidoc +++ b/docs/reference/analysis/testing.asciidoc @@ -69,8 +69,6 @@ PUT my_index } } -GET _cluster/health?wait_for_status=yellow - GET my_index/_analyze <3> { "analyzer": "std_folded", <4> diff --git a/docs/reference/analysis/tokenfilters/hunspell-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/hunspell-tokenfilter.asciidoc index 47b247e4bd5..9b3f188d951 100644 --- a/docs/reference/analysis/tokenfilters/hunspell-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/hunspell-tokenfilter.asciidoc @@ -87,12 +87,12 @@ the stemming is determined by the quality of the dictionary. [float] ==== Dictionary loading -By default, the default Hunspell directory (`config/hunspell/`) is checked -for dictionaries when the node starts up, and any dictionaries are +By default, the default Hunspell directory (`config/hunspell/`) is checked +for dictionaries when the node starts up, and any dictionaries are automatically loaded. 
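For instance, assuming an `en_US` dictionary directory exists under `config/hunspell/`, an analyzer using that dictionary could be sketched as:

[source,js]
--------------------------------------------------
PUT /hunspell_example
{
    "settings": {
        "analysis" : {
            "analyzer" : {
                "en" : {
                    "tokenizer" : "standard",
                    "filter" : [ "lowercase", "en_US_stemmer" ]
                }
            },
            "filter" : {
                "en_US_stemmer" : {
                    "type" : "hunspell",
                    "locale" : "en_US",
                    "dedup" : true
                }
            }
        }
    }
}
--------------------------------------------------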
Dictionary loading can be deferred until they are actually used by setting -`indices.analysis.hunspell.dictionary.lazy` to `true`in the config file. +`indices.analysis.hunspell.dictionary.lazy` to `true` in the config file. [float] ==== References diff --git a/docs/reference/analysis/tokenizers/classic-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/classic-tokenizer.asciidoc index 45d4ad41526..52bdcbd7732 100644 --- a/docs/reference/analysis/tokenizers/classic-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/classic-tokenizer.asciidoc @@ -164,8 +164,6 @@ PUT my_index } } -GET _cluster/health?wait_for_status=yellow - POST my_index/_analyze { "analyzer": "my_analyzer", @@ -264,6 +262,3 @@ The above example produces the following terms: --------------------------- [ The, 2, QUICK, Brown, Foxes, jumpe, d, over, the, lazy, dog's, bone ] --------------------------- - - - diff --git a/docs/reference/analysis/tokenizers/edgengram-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/edgengram-tokenizer.asciidoc index 0728767896a..3ef526325e7 100644 --- a/docs/reference/analysis/tokenizers/edgengram-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/edgengram-tokenizer.asciidoc @@ -128,8 +128,6 @@ PUT my_index } } -GET _cluster/health?wait_for_status=yellow - POST my_index/_analyze { "analyzer": "my_analyzer", @@ -319,5 +317,3 @@ GET my_index/_search ---------------------------- // TESTRESPONSE[s/"took".*/"took": "$body.took",/] ///////////////////// - - diff --git a/docs/reference/analysis/tokenizers/ngram-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/ngram-tokenizer.asciidoc index cf45da0627e..ae3183f0fd1 100644 --- a/docs/reference/analysis/tokenizers/ngram-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/ngram-tokenizer.asciidoc @@ -230,8 +230,6 @@ PUT my_index } } -GET _cluster/health?wait_for_status=yellow - POST my_index/_analyze { "analyzer": "my_analyzer", @@ -302,5 +300,3 @@ The above example produces the following terms: --------------------------- [ Qui, uic, ick, Fox, oxe, xes ] --------------------------- - - diff --git a/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc index b656e67eaec..55aa7d66da3 100644 --- a/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc @@ -114,8 +114,6 @@ PUT my_index } } -GET _cluster/health?wait_for_status=yellow - POST my_index/_analyze { "analyzer": "my_analyzer", @@ -172,4 +170,3 @@ If we were to set `reverse` to `true`, it would produce the following: --------------------------- [ one/two/three/, two/three/, three/ ] --------------------------- - diff --git a/docs/reference/analysis/tokenizers/pattern-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/pattern-tokenizer.asciidoc index ca902a4e5f2..c96fd08c952 100644 --- a/docs/reference/analysis/tokenizers/pattern-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/pattern-tokenizer.asciidoc @@ -130,8 +130,6 @@ PUT my_index } } -GET _cluster/health?wait_for_status=yellow - POST my_index/_analyze { "analyzer": "my_analyzer", @@ -223,8 +221,6 @@ PUT my_index } } -GET _cluster/health?wait_for_status=yellow - POST my_index/_analyze { "analyzer": "my_analyzer", diff --git a/docs/reference/analysis/tokenizers/standard-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/standard-tokenizer.asciidoc index ee052529b43..9f77a0e13dc 100644 --- 
a/docs/reference/analysis/tokenizers/standard-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/standard-tokenizer.asciidoc @@ -155,8 +155,6 @@ PUT my_index } } -GET _cluster/health?wait_for_status=yellow - POST my_index/_analyze { "analyzer": "my_analyzer", @@ -269,6 +267,3 @@ The above example produces the following terms: --------------------------- [ The, 2, QUICK, Brown, Foxes, jumpe, d, over, the, lazy, dog's, bone ] --------------------------- - - - diff --git a/docs/reference/analysis/tokenizers/uaxurlemail-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/uaxurlemail-tokenizer.asciidoc index 500a5e191f1..7fea0f1e8d8 100644 --- a/docs/reference/analysis/tokenizers/uaxurlemail-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/uaxurlemail-tokenizer.asciidoc @@ -111,8 +111,6 @@ PUT my_index } } -GET _cluster/health?wait_for_status=yellow - POST my_index/_analyze { "analyzer": "my_analyzer", diff --git a/docs/reference/cluster/allocation-explain.asciidoc b/docs/reference/cluster/allocation-explain.asciidoc index 3b07b40d9e7..ec223722c68 100644 --- a/docs/reference/cluster/allocation-explain.asciidoc +++ b/docs/reference/cluster/allocation-explain.asciidoc @@ -183,10 +183,19 @@ shard it finds by sending an empty body, such as: $ curl -XGET 'http://localhost:9200/_cluster/allocation/explain' -------------------------------------------------- -And if you would like to include all decisions that were factored into the final +If you would like to include all decisions that were factored into the final decision, the `include_yes_decisions` parameter will return all decisions: [source,js] -------------------------------------------------- $ curl -XGET 'http://localhost:9200/_cluster/allocation/explain?include_yes_decisions=true' -------------------------------------------------- + +Additionally, you can return information gathered by the cluster info service +about disk usage and shard sizes by setting the `include_disk_info` parameter to +`true`: + +[source,js] +-------------------------------------------------- +$ curl -XGET 'http://localhost:9200/_cluster/allocation/explain?include_disk_info=true' +-------------------------------------------------- diff --git a/docs/reference/cluster/health.asciidoc b/docs/reference/cluster/health.asciidoc index 137b4ac48cd..89806011b9d 100644 --- a/docs/reference/cluster/health.asciidoc +++ b/docs/reference/cluster/health.asciidoc @@ -2,37 +2,50 @@ == Cluster Health The cluster health API allows to get a very simple status on the health -of the cluster. +of the cluster. 
For example, on a quiet single node cluster with a single index +with 5 shards and one replica, this: [source,js] -------------------------------------------------- -$ curl -XGET 'http://localhost:9200/_cluster/health?pretty=true' +GET _cluster/health +-------------------------------------------------- +// CONSOLE +// TEST[s/^/PUT test1\n/] + +Returns this: +-------------------------------------------------- { "cluster_name" : "testcluster", - "status" : "green", + "status" : "yellow", "timed_out" : false, - "number_of_nodes" : 2, - "number_of_data_nodes" : 2, + "number_of_nodes" : 1, + "number_of_data_nodes" : 1, "active_primary_shards" : 5, - "active_shards" : 10, + "active_shards" : 5, "relocating_shards" : 0, "initializing_shards" : 0, - "unassigned_shards" : 0, + "unassigned_shards" : 5, "delayed_unassigned_shards": 0, "number_of_pending_tasks" : 0, "number_of_in_flight_fetch": 0, "task_max_waiting_in_queue_millis": 0, - "active_shards_percent_as_number": 100 + "active_shards_percent_as_number": 50.0 } -------------------------------------------------- +// TESTRESPONSE[s/testcluster/docs_integTest/] +// TESTRESPONSE[s/"number_of_pending_tasks" : 0,/"number_of_pending_tasks" : $body.number_of_pending_tasks,/] +// TESTRESPONSE[s/"task_max_waiting_in_queue_millis": 0/"task_max_waiting_in_queue_millis": $body.task_max_waiting_in_queue_millis/] + The API can also be executed against one or more indices to get just the specified indices health: [source,js] -------------------------------------------------- -$ curl -XGET 'http://localhost:9200/_cluster/health/test1,test2' +GET /_cluster/health/test1,test2 -------------------------------------------------- +// CONSOLE +// TEST[s/^/PUT test1\nPUT test2\n/] The cluster health status is: `green`, `yellow` or `red`. On the shard level, a `red` status indicates that the specific shard is not allocated @@ -49,8 +62,9 @@ it will return at that point): [source,js] -------------------------------------------------- -$ curl -XGET 'http://localhost:9200/_cluster/health?wait_for_status=yellow&timeout=50s' +GET /_cluster/health?wait_for_status=yellow&timeout=50s -------------------------------------------------- +// CONSOLE [float] [[request-params]] @@ -97,5 +111,7 @@ The following is an example of getting the cluster health at the [source,js] -------------------------------------------------- -$ curl -XGET 'http://localhost:9200/_cluster/health/twitter?level=shards' +GET /_cluster/health/twitter?level=shards -------------------------------------------------- +// CONSOLE +// TEST[setup:twitter] diff --git a/docs/reference/docs/delete-by-query.asciidoc b/docs/reference/docs/delete-by-query.asciidoc index 8b37e0a1220..d9c165ddcf0 100644 --- a/docs/reference/docs/delete-by-query.asciidoc +++ b/docs/reference/docs/delete-by-query.asciidoc @@ -40,7 +40,7 @@ That will return something like this: "search": 0 }, "throttled_millis": 0, - "requests_per_second": "unlimited", + "requests_per_second": -1.0, "throttled_until_millis": 0, "total": 119, "failures" : [ ] @@ -54,6 +54,10 @@ conflict if the document changes between the time when the snapshot was taken and when the delete request is processed. When the versions match the document is deleted. +NOTE: Since `internal` versioning does not support the value 0 as a valid +version number, documents with version equal to zero cannot be deleted using +`_delete_by_query` and will fail the request. 
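As a hedged sketch (the index and query are assumptions for illustration), version conflicts can be counted rather than aborting the whole request by sending `conflicts=proceed`:

[source,js]
--------------------------------------------------
POST twitter/_delete_by_query?conflicts=proceed
{
  "query": {
    "match_all": {}
  }
}
--------------------------------------------------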
+ During the `_delete_by_query` execution, multiple search requests are sequentially executed in order to find all the matching documents to delete. Every time a batch of documents is found, a corresponding bulk request is executed to delete all @@ -97,7 +101,7 @@ POST twitter,blog/tweet,post/_delete_by_query } -------------------------------------------------- // CONSOLE -// TEST[s/^/PUT twitter\nPUT blog\nGET _cluster\/health?wait_for_status=yellow\n/] +// TEST[s/^/PUT twitter\nPUT blog\n/] If you provide `routing` then the routing is copied to the scroll query, limiting the process to the shards that match that routing value: @@ -160,15 +164,15 @@ request. `timeout` controls how long each write request waits for unavailable shards to become available. Both work exactly how they work in the <>. -`requests_per_second` can be set to any decimal number (`1.4`, `6`, `1000`, etc) -and throttles the number of requests per second that the delete by query issues. -The throttling is done waiting between bulk batches so that it can manipulate -the scroll timeout. The wait time is the difference between the time it took the -batch to complete and the time `requests_per_second * requests_in_the_batch`. -Since the batch isn't broken into multiple bulk requests large batch sizes will -cause Elasticsearch to create many requests and then wait for a while before -starting the next set. This is "bursty" instead of "smooth". The default is -`unlimited` which is also the only non-number value that it accepts. +`requests_per_second` can be set to any positive decimal number (`1.4`, `6`, +`1000`, etc) and throttles the number of requests per second that the delete-by-query +issues or it can be set to `-1` to disable throttling. The throttling is done +waiting between bulk batches so that it can manipulate the scroll timeout. The +wait time is the difference between the time it took the batch to complete and +the time `requests_in_the_batch / requests_per_second`. Since the batch isn't +broken into multiple bulk requests large batch sizes will cause Elasticsearch +to create many requests and then wait for a while before starting the next set. +This is "bursty" instead of "smooth". The default is `-1`. [float] === Response body @@ -322,14 +326,15 @@ using the `_rethrottle` API: [source,js] -------------------------------------------------- -POST _delete_by_query/taskid:1/_rethrottle?requests_per_second=unlimited +POST _delete_by_query/taskid:1/_rethrottle?requests_per_second=-1 -------------------------------------------------- // CONSOLE The `task_id` can be found using the tasks API above. Just like when setting it on the `_delete_by_query` API `requests_per_second` -can be either `unlimited` to disable throttling or any decimal number like `1.7` -or `12` to throttle to that level. Rethrottling that speeds up the query takes -effect immediately but rethrotting that slows down the query will take effect -on after completing the current batch. This prevents scroll timeouts. +can be either `-1` to disable throttling or any decimal number +like `1.7` or `12` to throttle to that level. Rethrottling that speeds up the +query takes effect immediately but rethrottling that slows down the query will +take effect after completing the current batch. This prevents scroll +timeouts.
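To make the throttling arithmetic concrete, a worked example with assumed numbers rather than output from a real cluster:

----
requests_per_second    = 500
requests_in_the_batch  = 1000
target time for batch  = 1000 / 500 = 2 seconds
actual bulk write time = 0.5 seconds
wait before next batch = 2 - 0.5 = 1.5 seconds
----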
diff --git a/docs/reference/docs/index_.asciidoc b/docs/reference/docs/index_.asciidoc index e1c260ae48f..dda75dd5aa5 100644 --- a/docs/reference/docs/index_.asciidoc +++ b/docs/reference/docs/index_.asciidoc @@ -119,6 +119,14 @@ indexed and the new version number used. If the value provided is less than or equal to the stored document's version number, a version conflict will occur and the index operation will fail. +WARNING: External versioning supports the value 0 as a valid version number. +This allows the version to be in sync with an external versioning system +where version numbers start from zero instead of one. It has the side effect +that documents with version number equal to zero can neither be updated +using the <> nor be deleted +using the <> as long as their +version number is equal to zero. + A nice side effect is that there is no need to maintain strict ordering of async indexing operations executed as a result of changes to a source database, as long as version numbers from the source database are used. diff --git a/docs/reference/docs/refresh.asciidoc b/docs/reference/docs/refresh.asciidoc index dd829e19bc3..90c5d4e3afe 100644 --- a/docs/reference/docs/refresh.asciidoc +++ b/docs/reference/docs/refresh.asciidoc @@ -17,11 +17,12 @@ indexing and a search standpoint. Wait for the changes made by the request to be made visible by a refresh before replying. This doesn't force an immediate refresh, rather, it waits for a -refresh happen. Elasticsearch automatically refreshes shards that have changed +refresh to happen. Elasticsearch automatically refreshes shards that have changed every `index.refresh_interval` which defaults to one second. That setting is -<>. The <> API will also -cause the request to return, as will setting `refresh` to `true` on any of the -APIs that support it. +<>. Calling the <> API or +setting `refresh` to `true` on any of the APIs that support it will also +cause a refresh, in turn causing already running requests with `refresh=wait_for` +to return. `false` (the default):: @@ -36,7 +37,7 @@ use `refresh=false`, or, because that is the default, just leave the `refresh` parameter out of the URL. That is the simplest and fastest choice. If you absolutely must have the changes made by a request visible synchronously -with the request then you must get to pick between putting more load on +with the request then you must pick between putting more load on Elasticsearch (`true`) and waiting longer for the response (`wait_for`). Here are a few points that should inform that decision: @@ -97,7 +98,7 @@ search: -------------------------------------------------- PUT /test/test/3 {"test": "test"} -PUT /test/test/4?refresh=true +PUT /test/test/4?refresh=false {"test": "test"} -------------------------------------------------- // CONSOLE diff --git a/docs/reference/docs/reindex.asciidoc b/docs/reference/docs/reindex.asciidoc index aac722e96a3..7a5a5cbb865 100644 --- a/docs/reference/docs/reindex.asciidoc +++ b/docs/reference/docs/reindex.asciidoc @@ -39,7 +39,7 @@ That will return something like this: "search": 0 }, "throttled_millis": 0, - "requests_per_second": "unlimited", + "requests_per_second": -1.0, "throttled_until_millis": 0, "total": 120, "failures" : [ ] @@ -178,7 +178,7 @@ POST _reindex } -------------------------------------------------- // CONSOLE -// TEST[s/^/PUT twitter\nPUT blog\nGET _cluster\/health?wait_for_status=yellow\n/] +// TEST[s/^/PUT twitter\nPUT blog\n/] It's also possible to limit the number of processed documents by setting `size`.
This will only copy a single document from `twitter` to @@ -320,7 +320,7 @@ POST _reindex } -------------------------------------------------- // CONSOLE -// TEST[s/^/PUT source\nGET _cluster\/health?wait_for_status=yellow\n/] +// TEST[s/^/PUT source\n/] By default `_reindex` uses scroll batches of 1000. You can change the batch size with the `size` field in the `source` element: @@ -340,7 +340,7 @@ POST _reindex } -------------------------------------------------- // CONSOLE -// TEST[s/^/PUT source\nGET _cluster\/health?wait_for_status=yellow\n/] +// TEST[s/^/PUT source\n/] Reindex can also use the <> feature by specifying a `pipeline` like this: @@ -359,7 +359,61 @@ POST _reindex } -------------------------------------------------- // CONSOLE -// TEST[s/^/PUT source\nGET _cluster\/health?wait_for_status=yellow\n/] +// TEST[s/^/PUT source\n/] + +[float] +=== Reindex from Remote + +Reindex supports reindexing from a remote Elasticsearch cluster: + +[source,js] +-------------------------------------------------- +POST _reindex +{ + "source": { + "remote": { + "host": "http://otherhost:9200", + "username": "user", + "password": "pass" + }, + "index": "source", + "query": { + "match": { + "test": "data" + } + } + }, + "dest": { + "index": "dest" + } +} +-------------------------------------------------- +// CONSOLE +// TEST[setup:host] +// TEST[s/^/PUT source\n/] +// TEST[s/otherhost:9200",/\${host}"/] +// TEST[s/"username": "user",//] +// TEST[s/"password": "pass"//] + +The `host` parameter must contain a scheme, host, and port (e.g. +`https://otherhost:9200`). The `username` and `password` parameters are +optional and when they are present reindex will connect to the remote +Elasticsearch node using basic auth. Be sure to use `https` when using +basic auth or the password will be sent in plain text. + +Remote hosts have to be explicitly whitelisted in elasticsearch.yml using the +`reindex.remote.whitelist` property. It can be set to a comma delimited list +of allowed remote `host` and `port` combinations (e.g. +`otherhost:9200, another:9200`). Scheme is ignored by the whitelist - only host +and port are used. + +This feature should work with remote clusters of any version of Elasticsearch +you are likely to find. This should allow you to upgrade from any version of +Elasticsearch to the current version by reindexing from a cluster of the old +version. + +To enable queries sent to older versions of Elasticsearch the `query` parameter +is sent directly to the remote host without validation or modification. [float] === URL Parameters @@ -385,15 +439,15 @@ request. `timeout` controls how long each write request waits for unavailable shards to become available. Both work exactly how they work in the <>. -`requests_per_second` can be set to any decimal number (`1.4`, `6`, `1000`, etc) -and throttles the number of requests per second that the reindex issues. The -throttling is done waiting between bulk batches so that it can manipulate the -scroll timeout. The wait time is the difference between the time it took the -batch to complete and the time `requests_per_second * requests_in_the_batch`. -Since the batch isn't broken into multiple bulk requests large batch sizes will -cause Elasticsearch to create many requests and then wait for a while before -starting the next set. This is "bursty" instead of "smooth". The default is
+`requests_per_second` can be set to any positive decimal number (`1.4`, `6`, +`1000`, etc) and throttles the number of requests per second that the reindex +issues, or it can be set to `-1` to disable throttling. The throttling is done +waiting between bulk batches so that it can manipulate the scroll timeout. The +wait time is the difference between the time it took the batch to complete and +the time `requests_per_second * requests_in_the_batch`. Since the batch isn't +broken into multiple bulk requests large batch sizes will cause Elasticsearch +to create many requests and then wait for a while before starting the next set. +This is "bursty" instead of "smooth". The default is `-1`. [float] [[docs-reindex-response-body]] @@ -559,18 +613,18 @@ the `_rethrottle` API: [source,js] -------------------------------------------------- -POST _reindex/taskid:1/_rethrottle?requests_per_second=unlimited +POST _reindex/taskid:1/_rethrottle?requests_per_second=-1 -------------------------------------------------- // CONSOLE The `task_id` can be found using the tasks API above. -Just like when setting it on the `_reindex` API `requests_per_second` can be -either `unlimited` to disable throttling or any decimal number like `1.7` or -`12` to throttle to that level. Rethrottling that speeds up the query takes -effect immediately but rethrotting that slows down the query will take effect -on after completing the current batch. This prevents scroll timeouts. - +Just like when setting it on the `_reindex` API, `requests_per_second` +can be either `-1` to disable throttling or any decimal number +like `1.7` or `12` to throttle to that level. Rethrottling that speeds up the +query takes effect immediately but rethrottling that slows down the query will +take effect after completing the current batch. This prevents scroll +timeouts. [float] === Reindex to change the name of a field diff --git a/docs/reference/docs/update-by-query.asciidoc b/docs/reference/docs/update-by-query.asciidoc index 56ad1c7cd9a..ddf79eaef97 100644 --- a/docs/reference/docs/update-by-query.asciidoc +++ b/docs/reference/docs/update-by-query.asciidoc @@ -32,7 +32,7 @@ That will return something like this: "search": 0 }, "throttled_millis": 0, - "requests_per_second": "unlimited", + "requests_per_second": -1.0, "throttled_until_millis": 0, "total": 120, "failures" : [ ] @@ -46,6 +46,10 @@ conflict if the document changes between the time when the snapshot was taken and when the index request is processed. When the versions match the document is updated and the version number is incremented. +NOTE: Since `internal` versioning does not support the value 0 as a valid +version number, documents with a version equal to zero cannot be updated using +`_update_by_query`; attempting to do so will fail the request. + All update and query failures cause the `_update_by_query` to abort and are returned in the `failures` of the response. The updates that have been performed still stick. In other words, the process is not rolled back, only @@ -152,7 +156,7 @@ types at once, just like the search API: POST twitter,blog/tweet,post/_update_by_query -------------------------------------------------- // CONSOLE -// TEST[s/^/PUT twitter\nPUT blog\nGET _cluster\/health?wait_for_status=yellow\n/] +// TEST[s/^/PUT twitter\nPUT blog\n/] If you provide `routing` then the routing is copied to the scroll query, limiting the process to the shards that match that routing value: @@ -217,15 +221,15 @@ request.
`timeout` controls how long each write request waits for unavailable shards to become available. Both work exactly how they work in the <>. -`requests_per_second` can be set to any decimal number (`1.4`, `6`, `1000`, etc) -and throttles the number of requests per second that the update by query issues. -The throttling is done waiting between bulk batches so that it can manipulate -the scroll timeout. The wait time is the difference between the time it took the -batch to complete and the time `requests_per_second * requests_in_the_batch`. -Since the batch isn't broken into multiple bulk requests large batch sizes will -cause Elasticsearch to create many requests and then wait for a while before -starting the next set. This is "bursty" instead of "smooth". The default is -`unlimited` which is also the only non-number value that it accepts. +`requests_per_second` can be set to any positive decimal number (`1.4`, `6`, +`1000`, etc) and throttles the number of requests per second that the update-by-query +issues, or it can be set to `-1` to disable throttling. The throttling is done +waiting between bulk batches so that it can manipulate the scroll timeout. The +wait time is the difference between the time it took the batch to complete and +the time `requests_per_second * requests_in_the_batch`. Since the batch isn't +broken into multiple bulk requests large batch sizes will cause Elasticsearch +to create many requests and then wait for a while before starting the next set. +This is "bursty" instead of "smooth". The default is `-1`. [float] [[docs-update-by-query-response-body]] @@ -387,18 +391,18 @@ using the `_rethrottle` API: [source,js] -------------------------------------------------- -POST _update_by_query/taskid:1/_rethrottle?requests_per_second=unlimited +POST _update_by_query/taskid:1/_rethrottle?requests_per_second=-1 -------------------------------------------------- // CONSOLE The `task_id` can be found using the tasks API above. Just like when setting it on the `_update_by_query` API `requests_per_second` -can be either `unlimited` to disable throttling or any decimal number like `1.7` -or `12` to throttle to that level. Rethrottling that speeds up the query takes -effect immediately but rethrotting that slows down the query will take effect -on after completing the current batch. This prevents scroll timeouts. - +can be either `-1` to disable throttling or any decimal number +like `1.7` or `12` to throttle to that level. Rethrottling that speeds up the +query takes effect immediately but rethrottling that slows down the query will +take effect after completing the current batch. This prevents scroll +timeouts. [float] [[picking-up-a-new-property]] diff --git a/docs/reference/how-to.asciidoc b/docs/reference/how-to.asciidoc index ee954553617..f41c3a3bb9c 100644 --- a/docs/reference/how-to.asciidoc +++ b/docs/reference/how-to.asciidoc @@ -15,6 +15,8 @@ This section provides guidance about which changes should and shouldn't be made. -- +include::how-to/general.asciidoc[] + include::how-to/indexing-speed.asciidoc[] include::how-to/search-speed.asciidoc[] diff --git a/docs/reference/how-to/disk-usage.asciidoc b/docs/reference/how-to/disk-usage.asciidoc index 6465690ec96..59e82d7efe1 100644 --- a/docs/reference/how-to/disk-usage.asciidoc +++ b/docs/reference/how-to/disk-usage.asciidoc @@ -152,8 +152,9 @@ space.
They can be compressed more aggressively by using the `best_compression` [float] === Use the smallest numeric type that is sufficient -When storing <>, using `float` over `double`, or `half_float` -over `float` can help save storage. This is also true for integer types, but less -since Elasticsearch will more easily compress them based on the number of bits -that they actually need. - +The type that you pick for <> can have a significant impact +on disk usage. In particular, integers should be stored using an integer type +(`byte`, `short`, `integer` or `long`) and floating points should either be +stored in a `scaled_float` if appropriate or in the smallest type that fits the +use-case: using `float` over `double`, or `half_float` over `float` will help +save storage. diff --git a/docs/reference/how-to/general.asciidoc b/docs/reference/how-to/general.asciidoc new file mode 100644 index 00000000000..60f0181b2bb --- /dev/null +++ b/docs/reference/how-to/general.asciidoc @@ -0,0 +1,104 @@ +[[general-recommendations]] +== General recommendations + +[float] +[[large-size]] +=== Don't return large result sets + +Elasticsearch is designed as a search engine, which makes it very good at +getting back the top documents that match a query. However, it is not as good +for workloads that fall into the database domain, such as retrieving all +documents that match a particular query. If you need to do this, make sure to +use the <> API. + +[float] +[[sparsity]] +=== Avoid sparsity + +The data-structures behind Lucene, which Elasticsearch relies on in order to +index and store data, work best with dense data, i.e. when all documents have the +same fields. This is especially true for fields that have norms enabled (which +is the case for `text` fields by default) or doc values enabled (which is the +case for numerics, `date`, `ip` and `keyword` by default). + +The reason is that Lucene internally identifies documents with so-called doc +ids, which are integers between 0 and the total number of documents in the +index. These doc ids are used for communication between the internal APIs of +Lucene: for instance searching on a term with a `match` query produces an +iterator of doc ids, and these doc ids are then used to retrieve the value of +the `norm` in order to compute a score for these documents. The way this `norm` +lookup is implemented currently is by reserving one byte for each document. +The `norm` value for a given doc id can then be retrieved by reading the +byte at index `doc_id`. While this is very efficient and helps Lucene quickly +access the `norm` values of every document, this has the drawback that +documents that do not have a value will also require one byte of storage. + +In practice, this means that if an index has `M` documents, norms will require +`M` bytes of storage *per field*, even for fields that only appear in a small +fraction of the documents of the index. For example, an index with 10 million +documents pays about 10MB of norms per field, even if that field only has a +value in 1% of the documents. The problem is very similar with doc values, +although it is slightly more complex because doc values can be encoded in +multiple ways depending on the type of field and on the actual data that the +field stores. In case you wonder: `fielddata`, which was +used in Elasticsearch pre-2.0 before being replaced with doc values, also +suffered from this issue, except that the impact was only on the memory +footprint since `fielddata` was not explicitly materialized on disk.
+ +Note that even though the most notable impact of sparsity is on storage +requirements, it also has an impact on indexing speed and search speed since +these bytes for documents that do not have a field still need to be written +at index time and skipped over at search time. + +It is totally fine to have a minority of sparse fields in an index. But beware +that if sparsity becomes the rule rather than the exception, then the index +will not be as efficient as it could be. + +This section has mostly focused on `norms` and `doc values` because those are the +two features that are most affected by sparsity. Sparsity also affects the +efficiency of the inverted index (used to index `text`/`keyword` fields) and +dimensional points (used to index `geo_point` and numerics) but to a lesser +extent. + +Here are some recommendations that can help avoid sparsity: + +[float] +==== Avoid putting unrelated data in the same index + +You should avoid putting documents that have totally different structures into +the same index in order to avoid sparsity. It is often better to put these +documents into different indices; you could also consider giving fewer shards +to these smaller indices since they will contain fewer documents overall. + +Note that this advice does not apply in the case that you need to use +parent/child relations between your documents since this feature is only +supported on documents that live in the same index. + +[float] +==== Normalize document structures + +Even if you really need to put different kinds of documents in the same index, +maybe there are opportunities to reduce sparsity. For instance, if all documents +in the index have a timestamp field but some call it `timestamp` and others +call it `creation_date`, it would help to rename it so that all documents have +the same field name for the same data. + +[float] +==== Avoid types + +Types might sound like a good way to store multiple tenants in a single index. +They are not: given that types store everything in a single index, having +multiple types that have different fields in a single index will also cause +problems due to sparsity as described above. If your types do not have very +similar mappings, you might want to consider moving them to a dedicated index. + +[float] +==== Disable `norms` and `doc_values` on sparse fields + +If none of the above recommendations apply in your case, you might want to +check whether you actually need `norms` and `doc_values` on your sparse fields. +`norms` can be disabled if producing scores is not necessary on a field; this is +typically true for fields that are only used for filtering. `doc_values` can be +disabled on fields that are neither used for sorting nor for aggregations. +Beware that this decision should not be made lightly since these parameters +cannot be changed on a live index, so you would have to reindex if you realize +that you need `norms` or `doc_values`.
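+ +For instance, here is a minimal sketch of what that could look like (the +`notes` and `session_id` field names are illustrative, not part of the +recommendations above): + +[source,js] +-------------------------------------------------- +PUT my_index +{ + "mappings": { + "my_type": { + "properties": { + "notes": { + "type": "text", + "norms": false <1> + }, + "session_id": { + "type": "keyword", + "doc_values": false <2> + } + } + } + } +} +-------------------------------------------------- +// CONSOLE +<1> A sparse `text` field that is only used for filtering, so scoring norms can be dropped. +<2> A sparse `keyword` field that is never sorted or aggregated on, so doc values can be dropped.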
+ diff --git a/docs/reference/images/cardinality_error.png b/docs/reference/images/cardinality_error.png index 1f871c2c674..cf405be69ab 100644 Binary files a/docs/reference/images/cardinality_error.png and b/docs/reference/images/cardinality_error.png differ diff --git a/docs/reference/indices/analyze.asciidoc b/docs/reference/indices/analyze.asciidoc index d9016c9e8ec..5f75da11176 100644 --- a/docs/reference/indices/analyze.asciidoc +++ b/docs/reference/indices/analyze.asciidoc @@ -51,6 +51,18 @@ curl -XGET 'localhost:9200/_analyze' -d ' deprecated[5.0.0, Use `filter`/`token_filter`/`char_filter` instead of `filters`/`token_filters`/`char_filters`] +Custom tokenizers, token filters, and character filters can be specified in the request body as follows: + +[source,js] +-------------------------------------------------- +curl -XGET 'localhost:9200/_analyze' -d ' +{ + "tokenizer" : "whitespace", + "filter" : ["lowercase", {"type": "stop", "stopwords": ["a", "is", "this"]}], + "text" : "this is a test" +}' +-------------------------------------------------- + It can also run against a specific index: [source,js] @@ -70,7 +82,7 @@ can also be provided to use a different analyzer: curl -XGET 'localhost:9200/test/_analyze' -d ' { "analyzer" : "whitespace", - "text : "this is a test" + "text" : "this is a test" }' -------------------------------------------------- diff --git a/docs/reference/indices/shadow-replicas.asciidoc b/docs/reference/indices/shadow-replicas.asciidoc index 60360c147b5..3a0b23852b0 100644 --- a/docs/reference/indices/shadow-replicas.asciidoc +++ b/docs/reference/indices/shadow-replicas.asciidoc @@ -10,12 +10,12 @@ index. In order to fully utilize the `index.data_path` and `index.shadow_replicas` settings, you need to allow Elasticsearch to use the same data directory for -multiple instances by setting `node.add_id_to_custom_path` to false in +multiple instances by setting `node.add_lock_id_to_custom_path` to false in elasticsearch.yml: [source,yaml] -------------------------------------------------- -node.add_id_to_custom_path: false +node.add_lock_id_to_custom_path: false -------------------------------------------------- You will also need to indicate to the security manager where the custom indices @@ -114,7 +114,7 @@ settings API: These are non-dynamic settings that need to be configured in `elasticsearch.yml` -`node.add_id_to_custom_path`:: +`node.add_lock_id_to_custom_path`:: Boolean setting indicating whether Elasticsearch should append the node's ordinal to the custom data path. For example, if this is enabled and a path of "/tmp/foo" is used, the first locally-running node will use "/tmp/foo/0", diff --git a/docs/reference/indices/shrink-index.asciidoc b/docs/reference/indices/shrink-index.asciidoc index 39c5134e23c..237b46dfc00 100644 --- a/docs/reference/indices/shrink-index.asciidoc +++ b/docs/reference/indices/shrink-index.asciidoc @@ -2,8 +2,8 @@ == Shrink Index The shrink index API allows you to shrink an existing index into a new index -with fewer primary shards. The number of primary shards in the target index -must be a factor of the shards in the source index. For example an index with +with fewer primary shards. The requested number of primary shards in the target index +must be a factor of the number of shards in the source index. For example an index with `8` primary shards can be shrunk into `4`, `2` or `1` primary shards or an index with `15` primary shards can be shrunk into `5`, `3` or `1`. 
If the number of shards in the index is a prime number it can only be shrunk into a single @@ -76,8 +76,8 @@ Indices can only be shrunk if they satisfy the following requirements: * The index must have more primary shards than the target index. * The number of primary shards in the target index must be a factor of the - number of primary shards in the source index. must have more primary shards - than the target index. + number of primary shards in the source index. The source index must have + more primary shards than the target index. * The index must not contain more than `2,147,483,519` documents in total across all shards that will be shrunk into a single shard on the target index diff --git a/docs/reference/ingest/ingest-node.asciidoc b/docs/reference/ingest/ingest-node.asciidoc index b03ed641de7..00f25434073 100644 --- a/docs/reference/ingest/ingest-node.asciidoc +++ b/docs/reference/ingest/ingest-node.asciidoc @@ -46,7 +46,6 @@ PUT _ingest/pipeline/my-pipeline-id "value": "bar" } } - // other processors ] } -------------------------------------------------- @@ -83,7 +82,6 @@ Example response: "value": "bar" } } - // other processors ] } } ] @@ -858,10 +856,10 @@ Processes elements in an array of unknown length. All processors can operate on elements inside an array, but if all elements of an array need to be processed in the same way, defining a processor for each element becomes cumbersome and tricky because it is likely that the number of elements in an array is unknown. For this reason the `foreach` -processor exists. By specifying the field holding array elements and a list of processors that -define what should happen to each element, array fields can easily be preprocessed. +processor exists. By specifying the field holding array elements and a processor that +defines what should happen to each element, array fields can easily be preprocessed. -Processors inside the foreach processor work in a different context, and the only valid top-level +A processor inside the foreach processor works in a different context, and the only valid top-level field is `_value`, which holds the array element value. Under this field other fields may exist. If the `foreach` processor fails to process an element inside the array, and no `on_failure` processor has been specified, @@ -873,7 +871,7 @@ then it aborts the execution and leaves the array unmodified. 
|====== | Name | Required | Default | Description | `field` | yes | - | The array field -| `processors` | yes | - | The processors +| `processor` | yes | - | The processor to execute against each element |====== Assume the following document: @@ -892,13 +890,11 @@ When this `foreach` processor operates on this sample document: { "foreach" : { "field" : "values", - "processors" : [ - { - "uppercase" : { - "field" : "_value" - } + "processor" : { + "uppercase" : { + "field" : "_value" } - ] + } } } -------------------------------------------------- @@ -938,13 +934,11 @@ so the following `foreach` processor is used: { "foreach" : { "field" : "persons", - "processors" : [ - { - "remove" : { - "field" : "_value.id" - } + "processor" : { + "remove" : { + "field" : "_value.id" } - ] + } } } -------------------------------------------------- @@ -977,21 +971,19 @@ block to send the document to the 'failure_index' index for later inspection: { "foreach" : { "field" : "persons", - "processors" : [ - { - "remove" : { - "field" : "_value.id", - "on_failure" : [ - { - "set" : { - "field", "_index", - "value", "failure_index" - } + "processor" : { + "remove" : { + "field" : "_value.id", + "on_failure" : [ + { + "set" : { + "field" : "_index", + "value" : "failure_index" } - ] - } + } + ] } - ] + } } } -------------------------------------------------- @@ -1010,7 +1002,7 @@ expression that supports aliased expressions that can be reused. This tool is perfect for syslog logs, apache and other webserver logs, mysql logs, and in general, any log format that is generally written for humans and not computer consumption. This processor comes packaged with over -https://github.com/elastic/elasticsearch/tree/master/modules/ingest-grok/src/main/resources/patterns[120 reusable patterns]. +https://github.com/elastic/elasticsearch/tree/master/modules/ingest-common/src/main/resources/patterns[120 reusable patterns]. If you need help building patterns to match your logs, you will find the and applications quite useful! diff --git a/docs/reference/mapping/params/analyzer.asciidoc b/docs/reference/mapping/params/analyzer.asciidoc index 7fc6c801e16..c075b662805 100644 --- a/docs/reference/mapping/params/analyzer.asciidoc +++ b/docs/reference/mapping/params/analyzer.asciidoc @@ -60,8 +60,6 @@ PUT /my_index } } -GET _cluster/health?wait_for_status=yellow - GET my_index/_analyze?field=text <3> { "text": "The quick Brown Foxes." diff --git a/docs/reference/mapping/params/fielddata.asciidoc b/docs/reference/mapping/params/fielddata.asciidoc index 92151c56d49..6795b0f5b9b 100644 --- a/docs/reference/mapping/params/fielddata.asciidoc +++ b/docs/reference/mapping/params/fielddata.asciidoc @@ -2,42 +2,105 @@ === `fielddata` Most fields are <> by default, which makes them -searchable. The inverted index allows queries to look up the search term in -unique sorted list of terms, and from that immediately have access to the list -of documents that contain the term. +searchable. Sorting, aggregations, and accessing field values in scripts, +however, require a different access pattern from search. -Sorting, aggregations, and access to field values in scripts requires a -different data access pattern. Instead of lookup up the term and finding -documents, we need to be able to look up the document and find the terms that -it has in a field.
+Search needs to answer the question _"Which documents contain this term?"_, +while sorting and aggregations need to answer a different question: _"What is +the value of this field for **this** document?"_. -Most fields can use index-time, on-disk <> to support -this type of data access pattern, but `text` fields do not support `doc_values`. +Most fields can use index-time, on-disk <> for this +data access pattern, but <> fields do not support `doc_values`. -Instead, `text` strings use a query-time data structure called +Instead, `text` fields use a query-time *in-memory* data structure called `fielddata`. This data structure is built on demand the first time that a -field is used for aggregations, sorting, or is accessed in a script. It is built -by reading the entire inverted index for each segment from disk, inverting the -term ↔︎ document relationship, and storing the result in memory, in the -JVM heap. +field is used for aggregations, sorting, or in a script. It is built by +reading the entire inverted index for each segment from disk, inverting the +term ↔︎ document relationship, and storing the result in memory, in the JVM +heap. -Loading fielddata is an expensive process so it is disabled by default. Also, -when enabled, once it has been loaded, it remains in memory for the lifetime of -the segment. +==== Fielddata is disabled on `text` fields by default -[WARNING] -.Fielddata can fill up your heap space -============================================================================== -Fielddata can consume a lot of heap space, especially when loading high -cardinality `text` fields. Most of the time, it doesn't make sense -to sort or aggregate on `text` fields (with the notable exception -of the -<> -aggregation). Always think about whether a <> field (which can -use `doc_values`) would be a better fit for your use case. -============================================================================== +Fielddata can consume a *lot* of heap space, especially when loading high +cardinality `text` fields. Once fielddata has been loaded into the heap, it +remains there for the lifetime of the segment. Also, loading fielddata is an +expensive process which can cause users to experience latency hits. This is +why fielddata is disabled by default. -TIP: The `fielddata.*` settings must have the same settings for fields of the +If you try to sort, aggregate, or access values from a script on a `text` +field, you will see this exception: + +[quote] +-- +Fielddata is disabled on text fields by default. Set `fielddata=true` on +[`your_field_name`] in order to load fielddata in memory by uninverting the +inverted index. Note that this can however use significant memory. +-- + +[[before-enabling-fielddata]] +==== Before enabling fielddata + +Before you enable fielddata, consider why you are using a `text` field for +aggregations, sorting, or in a script. It usually doesn't make sense to do +so. + +A text field is analyzed before indexing so that a value like +`New York` can be found by searching for `new` or for `york`. A `terms` +aggregation on this field will return a `new` bucket and a `york` bucket, when +you probably want a single bucket called `New York`. 
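+ +For instance (a hypothetical sketch; the `city` field name is illustrative), +with fielddata enabled, a `terms` aggregation on such an analyzed field +buckets on the individual terms rather than on the original value: + +[source,js] +--------------------------------- +GET my_index/_search +{ + "size": 0, + "aggs": { + "cities": { + "terms": { + "field": "city" <1> + } + } + } +} +--------------------------------- +<1> Aggregating on the analyzed `city` field returns a `new` bucket and a +`york` bucket instead of a single `New York` bucket.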
+ +Instead, you should have a `text` field for full text searches, and an +unanalyzed <> field with <> +enabled for aggregations, as follows: + +[source,js] +--------------------------------- +PUT my_index +{ + "mappings": { + "my_type": { + "properties": { + "my_field": { <1> + "type": "text", + "fields": { + "keyword": { <2> + "type": "keyword" + } + } + } + } + } + } +} +--------------------------------- +// CONSOLE +<1> Use the `my_field` field for searches. +<2> Use the `my_field.keyword` field for aggregations, sorting, or in scripts. + +==== Enabling fielddata on `text` fields + +You can enable fielddata on an existing `text` field using the +<> as follows: + +[source,js] +----------------------------------- +PUT my_index/_mapping/my_type +{ + "properties": { + "my_field": { <1> + "type": "text", + "fielddata": true + } + } +} +----------------------------------- +// CONSOLE +// TEST[continued] + +<1> The mapping that you specify for `my_field` should consist of the existing + mapping for that field, plus the `fielddata` parameter. + +TIP: The `fielddata.*` parameter must have the same settings for fields of the same name in the same index. Its value can be updated on existing fields using the <>. @@ -49,12 +112,13 @@ using the <>. Global ordinals is a data-structure on top of fielddata and doc values, that maintains an incremental numbering for each unique term in a lexicographic order. Each term has a unique number and the number of term 'A' is lower than -the number of term 'B'. Global ordinals are only supported on string fields. +the number of term 'B'. Global ordinals are only supported on <> +and <> fields. -Fielddata and doc values also have ordinals, which is a unique numbering for all terms -in a particular segment and field. Global ordinals just build on top of this, -by providing a mapping between the segment ordinals and the global ordinals, -the latter being unique across the entire shard. +Fielddata and doc values also have ordinals, which is a unique numbering for +all terms in a particular segment and field. Global ordinals just build on top +of this, by providing a mapping between the segment ordinals and the global +ordinals, the latter being unique across the entire shard. Global ordinals are used for features that use segment ordinals, such as sorting and the terms aggregation, to improve the execution time. A terms @@ -68,10 +132,11 @@ which is different than for field data for a specific field which is tied to a single segment. For this reason global ordinals need to be entirely rebuilt whenever a once new segment becomes visible. -The loading time of global ordinals depends on the number of terms in a field, but in general -it is low, since it source field data has already been loaded. The memory overhead of global -ordinals is a small because it is very efficiently compressed. Eager loading of global ordinals -can move the loading time from the first search request, to the refresh itself. +The loading time of global ordinals depends on the number of terms in a field, +but in general it is low, since the source field data has already been loaded. +The memory overhead of global ordinals is small because it is very +efficiently compressed. Eager loading of global ordinals can move the loading +time from the first search request, to the refresh itself. ***************************************** @@ -81,7 +146,7 @@ can move the loading time from the first search request, to the refresh itself.
Fielddata filtering can be used to reduce the number of terms loaded into memory, and thus reduce memory usage. Terms can be filtered by _frequency_: -The frequency filter allows you to only load terms whose term frequency falls +The frequency filter allows you to only load terms whose document frequency falls between a `min` and `max` value, which can be expressed an absolute number (when the number is bigger than 1.0) or as a percentage (eg `0.01` is `1%` and `1.0` is `100%`). Frequency is calculated diff --git a/docs/reference/mapping/params/store.asciidoc b/docs/reference/mapping/params/store.asciidoc index d34d1016546..53cac7493ff 100644 --- a/docs/reference/mapping/params/store.asciidoc +++ b/docs/reference/mapping/params/store.asciidoc @@ -48,7 +48,7 @@ PUT my_index/my_type/1 GET my_index/_search { - "fields": [ "title", "date" ] <2> + "stored_fields": [ "title", "date" ] <2> } -------------------------------------------------- // CONSOLE diff --git a/docs/reference/mapping/types/numeric.asciidoc b/docs/reference/mapping/types/numeric.asciidoc index 7148585faf1..6fdc0c806a4 100644 --- a/docs/reference/mapping/types/numeric.asciidoc +++ b/docs/reference/mapping/types/numeric.asciidoc @@ -4,13 +4,14 @@ The following numeric types are supported: [horizontal] -`long`:: A signed 64-bit integer with a minimum value of +-2^63^+ and a maximum value of +2^63^-1+. -`integer`:: A signed 32-bit integer with a minimum value of +-2^31^+ and a maximum value of +2^31^-1+. -`short`:: A signed 16-bit integer with a minimum value of +-32,768+ and a maximum value of +32,767+. -`byte`:: A signed 8-bit integer with a minimum value of +-128+ and a maximum value of +127+. -`double`:: A double-precision 64-bit IEEE 754 floating point. -`float`:: A single-precision 32-bit IEEE 754 floating point. -`half_float`:: A half-precision 16-bit IEEE 754 floating point. +`long`:: A signed 64-bit integer with a minimum value of +-2^63^+ and a maximum value of +2^63^-1+. +`integer`:: A signed 32-bit integer with a minimum value of +-2^31^+ and a maximum value of +2^31^-1+. +`short`:: A signed 16-bit integer with a minimum value of +-32,768+ and a maximum value of +32,767+. +`byte`:: A signed 8-bit integer with a minimum value of +-128+ and a maximum value of +127+. +`double`:: A double-precision 64-bit IEEE 754 floating point. +`float`:: A single-precision 32-bit IEEE 754 floating point. +`half_float`:: A half-precision 16-bit IEEE 754 floating point. +`scaled_float`:: A floating point that is backed by a `long` and a fixed scaling factor. Below is an example of configuring a mapping with numeric fields: @@ -26,6 +27,10 @@ PUT my_index }, "time_in_seconds": { "type": "float" + }, + "price": { + "type": "scaled_float", + "scaling_factor": 100 } } } @@ -42,11 +47,24 @@ help indexing and searching be more efficient. Note however that given that storage is optimized based on the actual values that are stored, picking one type over another one will have no impact on storage requirements. -For floating-point types, picking the smallest type that is enough for the -use-case will still help indexing and searching be more efficient. However, -given that floating-point data is hard to compress, it might also have a -significant impact on storage requirements. Here is a table that compares the -3 floating-point types that are available in order to help make a decision. 
+For floating-point types, it is often more efficient to store floating-point +data into an integer using a scaling factor, which is what the `scaled_float` +type does under the hood. For instance, a `price` field could be stored in a +`scaled_float` with a `scaling_factor` of +100+. All APIs would work as if +the field was stored as a double, but under the hood Elasticsearch would be +working with the number of cents, +price*100+, which is an integer. This is +mostly helpful to save disk space since integers are way easier to compress +than floating points. `scaled_float` is also fine to use in order to trade +accuracy for disk space. For instance, imagine that you are tracking cpu +utilization as a number between +0+ and +1+. It usually does not matter much +whether cpu utilization is +12.7%+ or +13%+, so you could use a `scaled_float` +with a `scaling_factor` of +100+ to round cpu utilization to the +closest percent and save space. + +If `scaled_float` is not a good fit, then you should pick the smallest type +that is enough for the use-case among the floating-point types: `double`, +`float` and `half_float`. Here is a table that compares these types in order +to help make a decision. [cols="<,<,<,<",options="header",] |======================================================================= @@ -56,12 +74,6 @@ significant impact on storage requirements. Here is a table that compares the |`half_float`|+2^-24^+ |+65504+ |+11+ / +3.31+ |======================================================================= -When possible, it is often more efficient to store floating-point data into an -integer using a scaling factor. For instance, it is more efficient to store -percentages as integers between 0 and 100 than as floating-point numbers between 0 -and 1. Another example would be prices: it will be more efficient to store prices -as a number of cents, which is an integer, than as a floating-point number. - [[number-params]] ==== Parameters for numeric fields @@ -114,4 +126,19 @@ The following parameters are accepted by numeric types: the <> field. Accepts `true` or `false` (default). +[[scaled-float-params]] +==== Parameters for `scaled_float` +`scaled_float` accepts an additional parameter: + +[horizontal] + +`scaling_factor`:: + + The scaling factor to use when encoding values. Values will be multiplied + by this factor at index time and rounded to the closest long value. For + instance, a `scaled_float` with a `scaling_factor` of +10+ would internally + store +2.34+ as +23+ and all search-time operations (queries, aggregations, + sorting) will behave as if the document had a value of +2.3+. High values + of `scaling_factor` improve accuracy but also increase space requirements. + This parameter is required. diff --git a/docs/reference/mapping/types/percolator.asciidoc b/docs/reference/mapping/types/percolator.asciidoc index 4c7ff113fa9..ca8c8386e9b 100644 --- a/docs/reference/mapping/types/percolator.asciidoc +++ b/docs/reference/mapping/types/percolator.asciidoc @@ -71,11 +71,14 @@ a percolator query does not exist, it will be handled as a default string field fail. [float] -==== Important Notes +==== Limitations Because the `percolate` query is processing one document at a time, it doesn't support queries and filters that run against child documents such as `has_child` and `has_parent`. +The percolator doesn't accept percolator queries containing `range` queries with ranges that are based on current +time (using `now`).
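+ +For example, a registered percolator query along these lines would be rejected +(a hypothetical sketch; the index name, type name and `created_at` field are +illustrative): + +[source,js] +-------------------------------------------------- +PUT my-index/queries/1 +{ + "query": { + "range": { + "created_at": { + "gte": "now-1h" <1> + } + } + } +} +-------------------------------------------------- +<1> The `now`-based range makes this query invalid as a percolator query.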
+ +There are a number of queries that fetch data via a get call during query parsing. For example the `terms` query when using terms lookup, `template` query when using indexed scripts and `geo_shape` when using pre-indexed shapes. When these queries are indexed by the `percolator` field type then the get call is executed once. So each time the `percolator` diff --git a/docs/reference/migration/migrate_5_0/fs.asciidoc b/docs/reference/migration/migrate_5_0/fs.asciidoc index 859f3092823..42c8b4ddcea 100644 --- a/docs/reference/migration/migrate_5_0/fs.asciidoc +++ b/docs/reference/migration/migrate_5_0/fs.asciidoc @@ -23,3 +23,9 @@ behavior will be removed. If you are using a multi-cluster setup with both instances of Elasticsearch pointing to the same data path, you will need to add the cluster name to the data path so that different clusters do not overwrite data. + +==== Local files + +Prior to 5.0, nodes that were marked with both `node.data: false` and `node.master: false` (or the now removed `node.client: true`) +didn't write any files or folders to disk. 5.x added persistent node ids, requiring nodes to store that information. As such, all +node types will write a small state file to their data folders. \ No newline at end of file diff --git a/docs/reference/migration/migrate_5_0/java.asciidoc b/docs/reference/migration/migrate_5_0/java.asciidoc index 7d3a21b3197..7670f31c694 100644 --- a/docs/reference/migration/migrate_5_0/java.asciidoc +++ b/docs/reference/migration/migrate_5_0/java.asciidoc @@ -312,6 +312,11 @@ Removed the `getMemoryAvailable` method from `OsStats`, which could be previousl are now three options (NONE, IMMEDIATE, and WAIT_FOR). `setRefresh(IMMEDIATE)` has the same behavior as `setRefresh(true)` used to have. See `setRefreshPolicy`'s javadoc for more. +==== Remove properties support + +Some Java APIs (e.g., `IndicesAdminClient#setSettings`) would support Java properties syntax +(line-delimited key=value pairs). This support has been removed. + === Render Search Template Java API has been removed The Render Search Template Java API including `RenderSearchTemplateAction`, `RenderSearchTemplateRequest` and @@ -319,3 +324,12 @@ The Render Search Template Java API including `RenderSearchTemplateAction`, `Ren This Search Template API is now included in the `lang-mustache` module and the `simulate` flag must be set on the `SearchTemplateRequest` object.
+==== AnalyzeRequest + +The `tokenFilters(String...)` and `charFilters(String...)` methods have been removed +in favor of using `addTokenFilter(String)`/`addTokenFilter(Map)` and `addCharFilter(String)`/`addCharFilter(Map)`, which add a single filter at a time. + +==== AnalyzeRequestBuilder + +The `setTokenFilters(String...)` and `setCharFilters(String...)` methods have been removed +in favor of using `addTokenFilter(String)`/`addTokenFilter(Map)` and `addCharFilter(String)`/`addCharFilter(Map)`, which add a single filter at a time. diff --git a/docs/reference/migration/migrate_5_0/mapping.asciidoc b/docs/reference/migration/migrate_5_0/mapping.asciidoc index fa5411cf426..dd467abe9e1 100644 --- a/docs/reference/migration/migrate_5_0/mapping.asciidoc +++ b/docs/reference/migration/migrate_5_0/mapping.asciidoc @@ -224,7 +224,7 @@ POST _reindex } --------------- // CONSOLE -// TEST[s/^/PUT oldindex\nGET _cluster\/health?wait_for_status=yellow\n/] +// TEST[s/^/PUT oldindex\n/] You can replace `_ttl` with time based index names (preferred) or by adding a cron job which runs a delete-by-query on a timestamp field in the source diff --git a/docs/reference/migration/migrate_5_0/packaging.asciidoc b/docs/reference/migration/migrate_5_0/packaging.asciidoc index 977e20a76b1..74faf3bb7d9 100644 --- a/docs/reference/migration/migrate_5_0/packaging.asciidoc +++ b/docs/reference/migration/migrate_5_0/packaging.asciidoc @@ -55,3 +55,11 @@ from Elasticsearch. Additionally, it was previously possible to set any setting in Elasticsearch via JVM system properties. This has been removed from Elasticsearch. + +==== Dying on fatal errors + +Previous versions of Elasticsearch would not halt the JVM if out of memory errors or other fatal +errors were encountered during the life of the Elasticsearch instance. Because such errors leave +the JVM in a questionable state, the best course of action is to halt the JVM when this occurs. +Starting in Elasticsearch 5.x, this is now the case. Operators should consider configuring their +Elasticsearch services so that they respawn automatically in the case of such a fatal crash. diff --git a/docs/reference/migration/migrate_5_0/percolator.asciidoc b/docs/reference/migration/migrate_5_0/percolator.asciidoc index ae2057bddfb..f173a0df958 100644 --- a/docs/reference/migration/migrate_5_0/percolator.asciidoc +++ b/docs/reference/migration/migrate_5_0/percolator.asciidoc @@ -48,6 +48,11 @@ the existing document. The percolate stats have been removed. This is because the percolator no longer caches the percolator queries. +==== Percolator queries containing range queries with now ranges + +The percolator no longer accepts percolator queries containing `range` queries with ranges that are based on current +time (using `now`). + ==== Java client The percolator is no longer part of the core elasticsearch dependency. It has moved to the percolator module. diff --git a/docs/reference/migration/migrate_5_0/plugins.asciidoc b/docs/reference/migration/migrate_5_0/plugins.asciidoc index e1ff497a8f3..b16d93ee982 100644 --- a/docs/reference/migration/migrate_5_0/plugins.asciidoc +++ b/docs/reference/migration/migrate_5_0/plugins.asciidoc @@ -140,3 +140,17 @@ remove their `onModule(ActionModule)` implementation. Plugins that register custom `RestHandler`s should implement `ActionPlugin` and remove their `onModule(NetworkModule)` implemnetation.
+ +==== SearchPlugin + +Plugins that register custom search time behavior (`Query`, `Suggester`, +`ScoreFunction`, `FetchSubPhase`, `Highlighter`, etc) should implement +`SearchPlugin` and remove their `onModule(SearchModule)` implementation. + +==== Mapper-Size plugin + +The metadata field `_size` is not accessible in aggregations, in scripts, or when +sorting for indices created in 2.x, even if the index has been upgraded using the <> API. +If your application needs these features, you must reindex the data with Elasticsearch 5.x. +The easiest way to reindex old indices is to use the `reindex` API, or the reindex UI provided by +the <>. diff --git a/docs/reference/migration/migrate_5_0/rest.asciidoc b/docs/reference/migration/migrate_5_0/rest.asciidoc index a9f70d2caae..9d135c4d5bf 100644 --- a/docs/reference/migration/migrate_5_0/rest.asciidoc +++ b/docs/reference/migration/migrate_5_0/rest.asciidoc @@ -74,3 +74,8 @@ The `PUT /_scripts/{lang}/{id}/_create` endpoint that previously allowed to crea The `PUT /_search/template/{id}/_create` endpoint that previously allowed to create indexed template has been removed. Indexed templates have been replaced by <>. + +==== Remove properties support + +Some REST endpoints (e.g., cluster update index settings) supported detecting content in the Java +properties format (line-delimited key=value pairs). This support has been removed. diff --git a/docs/reference/migration/migrate_5_0/scripting.asciidoc b/docs/reference/migration/migrate_5_0/scripting.asciidoc index f416984afa5..3b44f899f59 100644 --- a/docs/reference/migration/migrate_5_0/scripting.asciidoc +++ b/docs/reference/migration/migrate_5_0/scripting.asciidoc @@ -1,6 +1,187 @@ [[breaking_50_scripting]] === Script related changes +==== Removed 1.x script and template syntax + +The deprecated 1.x syntax for defining inline scripts / templates and referring to file or index based scripts / templates +has been removed. + +The `script` and `params` string parameters can no longer be used and instead the `script` object syntax must be used. +This applies to the update api, script sort, `script_score` function, `script` query, `scripted_metric` aggregation and +`script_heuristic` aggregation. + +So this usage of inline scripts is no longer allowed: + +[source,js] +----------------------------------- +{ + "script_score": { + "lang": "groovy", + "script": "Math.log(_score * 2) + my_modifier", + "params": { + "my_modifier": 8 + } + } +} +----------------------------------- + +and instead this syntax must be used: + +[source,js] +----------------------------------- +{ + "script_score": { + "script": { + "lang": "groovy", + "inline": "Math.log(_score * 2) + my_modifier", + "params": { + "my_modifier": 8 + } + } + } +} +----------------------------------- + +The `script` or `script_file` parameter can no longer be used to refer to file based scripts and templates and instead +`file` must be used.
+ +This usage of referring to file based scripts is no longer valid: + +[source,js] +----------------------------------- +{ + "script_score": { + "script": "calculate-score", + "params": { + "my_modifier": 8 + } + } +} +----------------------------------- + +This usage is valid: + +[source,js] +----------------------------------- +{ + "script_score": { + "script": { + "lang": "groovy", + "file": "calculate-score", + "params": { + "my_modifier": 8 + } + } + } +} +----------------------------------- + +The `script_id` parameter can no longer be used to refer to indexed scripts and templates; instead `id` must +be used. + +This usage of referring to indexed scripts is no longer valid: + +[source,js] +----------------------------------- +{ + "script_score": { + "script_id": "indexedCalculateScore", + "params": { + "my_modifier": 8 + } + } +} +----------------------------------- + +This usage is valid: + +[source,js] +----------------------------------- +{ + "script_score": { + "script": { + "id": "indexedCalculateScore", + "lang" : "groovy", + "params": { + "my_modifier": 8 + } + } + } +} +----------------------------------- + +==== Template query + +The `query` field in the `template` query can no longer be used. +This 1.x syntax can no longer be used: + +[source,js] +----------------------------------- +{ + "query": { + "template": { + "query": {"match_{{template}}": {}}, + "params" : { + "template" : "all" + } + } + } +} +----------------------------------- + +and instead the following syntax should be used: + +[source,js] +----------------------------------- +{ + "query": { + "template": { + "inline": {"match_{{template}}": {}}, + "params" : { + "template" : "all" + } + } + } +} +----------------------------------- + +==== Search templates + +The top level `template` field in the search template api has been replaced with consistent template / script object +syntax. This 1.x syntax can no longer be used: + +[source,js] +----------------------------------- +{ + "template" : { + "query": { "match" : { "{{my_field}}" : "{{my_value}}" } }, + "size" : "{{my_size}}" + }, + "params" : { + "my_field" : "foo", + "my_value" : "bar", + "my_size" : 5 + } +} +----------------------------------- + +and instead the following syntax should be used: + +[source,js] +----------------------------------- +{ + "inline" : { + "query": { "match" : { "{{my_field}}" : "{{my_value}}" } }, + "size" : "{{my_size}}" + }, + "params" : { + "my_field" : "foo", + "my_value" : "bar", + "my_size" : 5 + } +} +----------------------------------- + ==== Indexed scripts and templates Indexed scripts and templates have been replaced by <> @@ -13,8 +194,8 @@ If scripts are really large, other options like native scripts should be conside Previously indexed scripts in the `.scripts` index will not be used any more as Elasticsearch will now try to fetch the scripts from the cluster state. Upon upgrading to 5.x the `.scripts` index will remain to exist, so it can be used by a script to migrate -the stored scripts from the `.scripts` index into the cluster state. The format of the scripts -hasn't changed. +the stored scripts from the `.scripts` index into the cluster state. The current format of the scripts +and templates hasn't been changed; only the 1.x format has been removed. ===== Python migration script @@ -104,3 +285,28 @@ engine doing this was the Javascript engine, which registered "js" and The Javascript engine previously registered "js" and "javascript".
It now only registers the "js" file extension for on-disk scripts. + +==== Removed scripting query string parameters from update rest api + +The `script`, `script_id` and `scripting_upsert` query string parameters have been removed from the update api. + +==== Java transport client + +The `TemplateQueryBuilder` has been moved to the `lang-mustache` module. +Therefore, when using the `TemplateQueryBuilder` from the Java native client the +lang-mustache module should be on the classpath. Also the transport client +should load the lang-mustache module as a plugin: + +[source,java] +-------------------------------------------------- +TransportClient transportClient = TransportClient.builder() + .settings(Settings.builder().put("node.name", "node")) + .addPlugin(MustachePlugin.class) + .build(); +transportClient.addTransportAddress( + new InetSocketTransportAddress(new InetSocketAddress(InetAddresses.forString("127.0.0.1"), 9300)) +); +-------------------------------------------------- + +Also the helper methods in the `QueryBuilders` class that create a `TemplateQueryBuilder` instance have been removed; +instead, the constructors on `TemplateQueryBuilder` should be used. \ No newline at end of file diff --git a/docs/reference/migration/migrate_5_0/search.asciidoc b/docs/reference/migration/migrate_5_0/search.asciidoc index 72b29e8a9e7..09478ee3251 100644 --- a/docs/reference/migration/migrate_5_0/search.asciidoc +++ b/docs/reference/migration/migrate_5_0/search.asciidoc @@ -64,11 +64,15 @@ characteristics as the former `scan` search type. ==== `fields` parameter -The `fields` parameter used to try to retrieve field values from stored -fields, and fall back to extracting from the `_source` if a field is not -marked as stored. Now, the `fields` parameter will only return stored fields +The `fields` parameter has been replaced by `stored_fields`. +The `stored_fields` parameter will only return stored fields -- it will no longer extract values from the `_source`. +==== `fielddata_fields` parameter + +The `fielddata_fields` parameter has been deprecated, use the `docvalue_fields` parameter instead. + + ==== search-exists API removed The search exists api has been removed in favour of using the search api with diff --git a/docs/reference/migration/migrate_5_0/settings.asciidoc b/docs/reference/migration/migrate_5_0/settings.asciidoc index ffe69aa3cfb..da0a5776ad0 100644 --- a/docs/reference/migration/migrate_5_0/settings.asciidoc +++ b/docs/reference/migration/migrate_5_0/settings.asciidoc @@ -26,6 +26,11 @@ should be used instead. The `name` setting has been removed and is replaced by `node.name`. Usage of `-Dname=some_node_name` is not supported anymore. +The `node.add_id_to_custom_path` setting was renamed to `node.add_lock_id_to_custom_path`. + +The settings `node.mode` and `node.local` are removed. Local mode should be configured via +`discovery.type: local` and `transport.type: local`. In order to disable _http_, please use `http.enabled: false`. + ==== Node attribute settings Node level attributes used for allocation filtering, forced awareness or other node identification / grouping @@ -64,6 +69,10 @@ pick the first interface not marked as loopback. Instead, specify by address scope (e.g. `_local_,_site_` for all loopback and private network addresses) or by explicit interface names, hostnames, or addresses. +The `netty.epollBugWorkaround` setting is removed. This setting allowed people to enable +a Netty workaround for https://github.com/netty/netty/issues/327[a high CPU usage issue] with early JVM versions.
+This bug was http://bugs.java.com/view_bug.do?bug_id=6403933[fixed in Java 7]. Since Elasticsearch 5.0 requires Java 8, the setting has been removed. Note that if the workaround needs to be reintroduced, you can still set the `org.jboss.netty.epollBugWorkaround` system property to control Netty directly. + ==== Forbid changing of thread pool types Previously, <> could be dynamically @@ -227,6 +236,11 @@ Elasticsearch could previously be configured on the command line by setting settings via `--name.of.setting value.of.setting`. This feature has been removed. Instead, use `-Ename.of.setting=value.of.setting`. +==== Remove support for .properties config files + +The Elasticsearch configuration and logging configuration can no longer be stored in the Java +properties file format (line-delimited key=value pairs with a `.properties` extension). + ==== Discovery Settings The `discovery.zen.minimum_master_node` must be set for nodes that have diff --git a/docs/reference/modules/cluster.asciidoc b/docs/reference/modules/cluster.asciidoc index 5d7bac10917..c4b64452927 100644 --- a/docs/reference/modules/cluster.asciidoc +++ b/docs/reference/modules/cluster.asciidoc @@ -7,7 +7,7 @@ cluster. There are a number of settings available to control the shard allocation process: -* <> lists the settings to control the allocation an +* <> lists the settings to control the allocation and rebalancing operations. * <> explains how Elasticsearch takes available disk space diff --git a/docs/reference/modules/indices/request_cache.asciidoc b/docs/reference/modules/indices/request_cache.asciidoc index b1ce608cbf2..cf2406388ce 100644 --- a/docs/reference/modules/indices/request_cache.asciidoc +++ b/docs/reference/modules/indices/request_cache.asciidoc @@ -15,7 +15,7 @@ results from older indices will be served directly from the cache. [IMPORTANT] =================================== -For now, the requests cache will only cache the results of search requests +By default, the requests cache will only cache the results of search requests where `size=0`, so it will not cache `hits`, but it will cache `hits.total`, <>, and <>. @@ -98,6 +98,10 @@ IMPORTANT: If your query uses a script whose result is not deterministic (e.g. it uses a random function or references the current time) you should set the `request_cache` flag to `false` to disable caching for that request. +Requests where `size` is greater than 0 will not be cached even if the request cache is +enabled in the index settings. To cache these requests you will need to use the +query-string parameter detailed here. + [float] ==== Cache key diff --git a/docs/reference/modules/network.asciidoc b/docs/reference/modules/network.asciidoc index 6657e98869f..d6118246b50 100644 --- a/docs/reference/modules/network.asciidoc +++ b/docs/reference/modules/network.asciidoc @@ -115,7 +115,7 @@ The publish host is the single interface that the node advertises to other nodes in the cluster, so that those nodes can connect to it. Currently an elasticsearch node may be bound to multiple addresses, but only publishes one. If not specified, this defaults to the ``best'' address from -`network.bind_host`, sorted by IPv4/IPv6 stack preference, then by +`network.host`, sorted by IPv4/IPv6 stack preference, then by reachability.
Both of the above settings can be configured just like `network.host` -- they diff --git a/docs/reference/modules/node.asciidoc b/docs/reference/modules/node.asciidoc index 2f1caa42ad8..29379a2d99f 100644 --- a/docs/reference/modules/node.asciidoc +++ b/docs/reference/modules/node.asciidoc @@ -82,6 +82,9 @@ cluster health to have a stable master node. Any master-eligible node (all nodes by default) may be elected to become the master node by the <>. +IMPORTANT: Master nodes must have access to the `data/` directory (just like +`data` nodes) as this is where the cluster state is persisted between node restarts. + Indexing and searching your data is CPU-, memory-, and I/O-intensive work which can put pressure on a node's resources. To ensure that your master node is stable and not under pressure, it is a good idea in a bigger diff --git a/docs/reference/modules/scripting/painless.asciidoc b/docs/reference/modules/scripting/painless.asciidoc index 93fb136913b..ad36cdd6df4 100644 --- a/docs/reference/modules/scripting/painless.asciidoc +++ b/docs/reference/modules/scripting/painless.asciidoc @@ -143,7 +143,7 @@ First, let's look at the source data for a player by submitting the following re ---------------------------------------------------------------- GET hockey/_search { - "fields": [ + "stored_fields": [ "_id", "_source" ], diff --git a/docs/reference/modules/scripting/using.asciidoc b/docs/reference/modules/scripting/using.asciidoc index e50c737fd95..0db2afbb3b6 100644 --- a/docs/reference/modules/scripting/using.asciidoc +++ b/docs/reference/modules/scripting/using.asciidoc @@ -73,6 +73,7 @@ specified as `file` unless you first adjust the default variables. [IMPORTANT] +[[prefer-params]] .Prefer parameters ======================================== diff --git a/docs/reference/query-dsl/function-score-query.asciidoc b/docs/reference/query-dsl/function-score-query.asciidoc index c6477b78d8b..b1b6b56c2b2 100644 --- a/docs/reference/query-dsl/function-score-query.asciidoc +++ b/docs/reference/query-dsl/function-score-query.asciidoc @@ -68,7 +68,7 @@ GET /_search NOTE: The scores produced by the filtering query of each function do not matter. -If no query is given with a function this is equivalent to specifying +If no filter is given with a function, this is equivalent to specifying `"match_all": {}` First, each document is scored by the defined functions. The parameter diff --git a/docs/reference/release-notes.asciidoc b/docs/reference/release-notes.asciidoc index 90492dee33e..ea535263b7d 100644 --- a/docs/reference/release-notes.asciidoc +++ b/docs/reference/release-notes.asciidoc @@ -5,12 +5,14 @@ -- This section summarizes the changes in each release.
+* <> * <> * <> * <> * <> -- +include::release-notes/5.0.0-alpha4.asciidoc[] include::release-notes/5.0.0-alpha3.asciidoc[] include::release-notes/5.0.0-alpha2.asciidoc[] include::release-notes/5.0.0-alpha1.asciidoc[] diff --git a/docs/reference/search/field-stats.asciidoc b/docs/reference/search/field-stats.asciidoc index 4c38363bee9..90dbd539d65 100644 --- a/docs/reference/search/field-stats.asciidoc +++ b/docs/reference/search/field-stats.asciidoc @@ -311,10 +311,10 @@ curl -XPOST "http://localhost:9200/_field_stats?level=indices" -d '{ "fields" : ["answer_count"] <1> "index_constraints" : { <2> "creation_date" : { <3> - "min_value" : { <4> + "max_value" : { <4> "gte" : "2014-01-01T00:00:00.000Z" }, - "max_value" : { + "min_value" : { <4> "lt" : "2015-01-01T00:00:00.000Z" } } @@ -325,7 +325,7 @@ curl -XPOST "http://localhost:9200/_field_stats?level=indices" -d '{ <1> The fields to compute and return field stats for. <2> The set index constraints. Note that index constraints can be defined for fields that aren't defined in the `fields` option. <3> Index constraints for the field `creation_date`. -<4> An index constraint on the `min_value` property of a field statistic. +<4> Index constraints on the `max_value` and `min_value` properties of a field statistic. For a field, index constraints can be defined on the `min_value` statistic, `max_value` statistic or both. Each index constraint supports the following comparisons: @@ -345,11 +345,11 @@ curl -XPOST "http://localhost:9200/_field_stats?level=indices" -d '{ "fields" : ["answer_count"] "index_constraints" : { "creation_date" : { - "min_value" : { + "max_value" : { "gte" : "2014-01-01", "format" : "date_optional_time" <1> }, - "max_value" : { + "min_value" : { "lt" : "2015-01-01", "format" : "date_optional_time" } diff --git a/docs/reference/search/profile.asciidoc b/docs/reference/search/profile.asciidoc index 150b1b93a36..e2c22caf6f4 100644 --- a/docs/reference/search/profile.asciidoc +++ b/docs/reference/search/profile.asciidoc @@ -40,14 +40,14 @@ This will yield the following result: "failed": 0 }, "hits": { - "total": 1, - "max_score": 1, + "total": 3, + "max_score": 1.078072, "hits": [ ...
] <1> }, "profile": { "shards": [ { - "id": "[htuC6YnSSSmKFq5UBt0YMA][test][0]", + "id": "[2aE02wS1R8q_QFnYu6vDVQ][test][1]", "searches": [ { "query": [ @@ -56,12 +56,18 @@ This will yield the following result: "description": "message:search message:test", "time": "15.52889800ms", "breakdown": { - "score": 0, - "next_doc": 24495, + "score": 6352, + "score_count": 1, + "build_scorer": 1800776, + "build_scorer_count": 1, "match": 0, - "create_weight": 8488388, - "build_scorer": 7016015, - "advance": 0 + "match_count": 0, + "create_weight": 667400, + "create_weight_count": 1, + "next_doc": 10563, + "next_doc_count": 2, + "advance": 0, + "advance_count": 0 }, "children": [ { @@ -70,11 +76,17 @@ This will yield the following result: "time": "4.938855000ms", "breakdown": { "score": 0, - "next_doc": 18332, + "score_count": 0, + "build_scorer": 3230, + "build_scorer_count": 1, "match": 0, - "create_weight": 2945570, - "build_scorer": 1974953, - "advance": 0 + "match_count": 0, + "create_weight": 415612, + "create_weight_count": 1, + "next_doc": 0, + "next_doc_count": 0, + "advance": 0, + "advance_count": 0 } }, { @@ -82,30 +94,36 @@ This will yield the following result: "description": "message:test", "time": "0.5016660000ms", "breakdown": { - "score": 0, - "next_doc": 0, + "score": 5014, + "score_count": 1, + "build_scorer": 1689333, + "build_scorer_count": 1, "match": 0, - "create_weight": 170534, - "build_scorer": 331132, - "advance": 0 + "match_count": 0, + "create_weight": 166587, + "create_weight_count": 1, + "next_doc": 5542, + "next_doc_count": 2, + "advance": 0, + "advance_count": 0 } } ] } ], - "rewrite_time": 185002, + "rewrite_time": 870954, "collector": [ { "name": "SimpleTopScoreDocCollector", "reason": "search_top_hits", - "time": "2.206529000ms" + "time": "0.009783000000ms" } ] } ] } ] - } + } } -------------------------------------------------- <1> Search results are returned, but were omitted here for brevity @@ -121,11 +139,11 @@ First, the overall structure of the profile response is as follows: "profile": { "shards": [ { - "id": "[htuC6YnSSSmKFq5UBt0YMA][test][0]", <1> + "id": "[2aE02wS1R8q_QFnYu6vDVQ][test][1]", <1> "searches": [ { "query": [...], <2> - "rewrite_time": 185002, <3> + "rewrite_time": 870954, <3> "collector": [...] <4> } ], @@ -227,13 +245,18 @@ The `"breakdown"` component lists detailed timing statistics about low-level Luc [source,js] -------------------------------------------------- "breakdown": { - "score": 0, - "next_doc": 24495, - "match": 0, - "create_weight": 8488388, - "build_scorer": 7016015, - "advance": 0 - + "score": 5014, + "score_count": 1, + "build_scorer": 1689333, + "build_scorer_count": 1, + "match": 0, + "match_count": 0, + "create_weight": 166587, + "create_weight_count": 1, + "next_doc": 5542, + "next_doc_count": 2, + "advance": 0, + "advance_count": 0 } -------------------------------------------------- @@ -305,6 +328,10 @@ The meanings of the stats are as follows: This records the time taken to score a particular document via its Scorer +`*_count`:: + Records the number of invocations of the particular method. For example, `"next_doc_count": 2,` + means the `nextDoc()` method was called on two different documents. This can be used to help judge + how selective queries are, by comparing counts between different query components.
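A short sketch of how these profile breakdowns can be requested through the Java API used elsewhere in this diff; `setProfile` and `getProfileResults` are assumed to match the 5.x client, and the index and query are hypothetical:

[source,java]
----
import java.util.Map;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.profile.ProfileShardResult;

// Run a profiled search; the response then carries one profile entry per shard,
// keyed by the composite id shown above, e.g. "[2aE02wS1R8q_QFnYu6vDVQ][test][1]".
SearchResponse resp = client().prepareSearch("test")
        .setQuery(QueryBuilders.matchQuery("message", "search test"))
        .setProfile(true)
        .get();
for (Map.Entry<String, ProfileShardResult> shard : resp.getProfileResults().entrySet()) {
    System.out.println("profiled shard: " + shard.getKey());
}
----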
==== `collectors` Section @@ -473,11 +500,17 @@ And the response: "time": "0.4094560000ms", "breakdown": { "score": 0, + "score_count": 1, "next_doc": 0, + "next_doc_count": 2, "match": 0, + "match_count": 0, "create_weight": 31584, + "create_weight_count": 1, "build_scorer": 377872, + "build_scorer_count": 1, "advance": 0 + "advance_count": 0 } }, { @@ -486,11 +519,17 @@ And the response: "time": "0.3037020000ms", "breakdown": { "score": 0, + "score_count": 1, "next_doc": 5936, + "next_doc_count": 2, "match": 0, + "match_count": 0, "create_weight": 185215, + "create_weight_count": 1, "build_scorer": 112551, + "build_scorer_count": 1, "advance": 0 + "advance_count": 0 } } ], @@ -530,11 +569,17 @@ And the response: "time": "0.04829300000ms", "breakdown": { "score": 0, + "score_count": 1, "next_doc": 3672, + "next_doc_count": 2, "match": 0, + "match_count": 0, "create_weight": 6311, + "create_weight_count": 1, "build_scorer": 38310, + "build_scorer_count": 1, "advance": 0 + "advance_count": 0 } } ], @@ -627,9 +672,13 @@ Which yields the following aggregation profile output "time": "4280.456978ms", "breakdown": { "reduce": 0, + "reduce_count": 0, "build_aggregation": 49765, + "build_aggregation_count": 300, "initialise": 52785, - "collect": 3155490036 + "initialize_count": 300, + "collect": 3155490036, + "collect_count": 1800 }, "children": [ { @@ -638,9 +687,13 @@ Which yields the following aggregation profile output "time": "1124.864392ms", "breakdown": { "reduce": 0, + "reduce_count": 0, "build_aggregation": 1394, + "build_aggregation_count": 150, "initialise": 2883, - "collect": 1124860115 + "initialize_count": 150, + "collect": 1124860115, + "collect_count": 900 } } ] @@ -666,9 +719,13 @@ The `"breakdown"` component lists detailed timing statistics about low-level Luc -------------------------------------------------- "breakdown": { "reduce": 0, + "reduce_count": 0, "build_aggregation": 49765, + "build_aggregation_count": 300, "initialise": 52785, - "collect": 3155490036 + "initialize_count": 300, + "collect": 3155490036, + "collect_count": 1800 } -------------------------------------------------- @@ -699,6 +756,10 @@ The meanings of the stats are as follows: This is not currently used and will always report `0`. Currently aggregation profiling only times the shard level parts of the aggregation execution. Timing of the reduce phase will be added later. +`*_count`:: + Records the number of invocations of the particular method. For example, `"collect_count": 2,` + means the `collect()` method was called on two different documents.
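Since each timing now has a `*_count` twin, a per-invocation average falls out of simple division; the helper below is a hypothetical illustration operating on a breakdown parsed into a plain map:

[source,java]
----
import java.util.LinkedHashMap;
import java.util.Map;

// For every timed method in a breakdown, divide total nanoseconds by the matching
// *_count to get the average cost per call (entries with a zero count are skipped).
static Map<String, Double> nanosPerInvocation(Map<String, Long> breakdown) {
    Map<String, Double> avg = new LinkedHashMap<>();
    for (Map.Entry<String, Long> e : breakdown.entrySet()) {
        if (e.getKey().endsWith("_count")) {
            continue;
        }
        Long count = breakdown.get(e.getKey() + "_count");
        if (count != null && count > 0) {
            avg.put(e.getKey(), e.getValue() / (double) count);
        }
    }
    return avg;
}
----

For the aggregation output above, `collect` = 3155490036ns over `collect_count` = 1800 calls works out to roughly 1.75ms per `collect()` invocation. Note that the `initialise`/`initialize_count` spelling mismatch in the sample output would evade this simple key pairing.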
+ === Profiling Considerations ==== Performance Notes diff --git a/docs/reference/search/request-body.asciidoc b/docs/reference/search/request-body.asciidoc index 958320ea110..a9adc157bd3 100644 --- a/docs/reference/search/request-body.asciidoc +++ b/docs/reference/search/request-body.asciidoc @@ -143,11 +143,11 @@ include::request/sort.asciidoc[] include::request/source-filtering.asciidoc[] -include::request/fields.asciidoc[] +include::request/stored-fields.asciidoc[] include::request/script-fields.asciidoc[] -include::request/fielddata-fields.asciidoc[] +include::request/docvalue-fields.asciidoc[] include::request/post-filter.asciidoc[] diff --git a/docs/reference/search/request/docvalue-fields.asciidoc b/docs/reference/search/request/docvalue-fields.asciidoc new file mode 100644 index 00000000000..b4d2493d853 --- /dev/null +++ b/docs/reference/search/request/docvalue-fields.asciidoc @@ -0,0 +1,23 @@ +[[search-request-docvalue-fields]] +=== Doc value Fields + +Allows to return the <> representation of a field for each hit, for +example: + +[source,js] +-------------------------------------------------- +GET /_search +{ + "query" : { + "match_all": {} + }, + "docvalue_fields" : ["test1", "test2"] +} +-------------------------------------------------- +// CONSOLE + +Doc value fields can work on fields that are not stored. + +Note that if the `docvalue_fields` parameter specifies fields without doc values, it will try to load the value from the fielddata cache, +causing the terms for that field to be loaded into memory (cached), which will result in more memory consumption. + diff --git a/docs/reference/search/request/fielddata-fields.asciidoc b/docs/reference/search/request/fielddata-fields.asciidoc deleted file mode 100644 index f3a3508b144..00000000000 --- a/docs/reference/search/request/fielddata-fields.asciidoc +++ /dev/null @@ -1,23 +0,0 @@ -[[search-request-fielddata-fields]] -=== Field Data Fields - -Allows to return the <> representation of a field for each hit, for -example: - -[source,js] --------------------------------------------------- -GET /_search -{ - "query" : { - "match_all": {} - }, - "fielddata_fields" : ["test1", "test2"] -} --------------------------------------------------- -// CONSOLE - -Field data fields can work on fields that are not stored. - -It's important to understand that using the `fielddata_fields` parameter will -cause the terms for that field to be loaded to memory (cached), which will -result in more memory consumption. diff --git a/docs/reference/search/request/highlighting.asciidoc b/docs/reference/search/request/highlighting.asciidoc index c235c37b338..efb7053c179 100644 --- a/docs/reference/search/request/highlighting.asciidoc +++ b/docs/reference/search/request/highlighting.asciidoc @@ -372,7 +372,7 @@ query and the rescore query in `highlight_query`.
-------------------------------------------------- GET /_search { - "fields": [ "_id" ], + "stored_fields": [ "_id" ], "query" : { "match": { "content": { diff --git a/docs/reference/search/request/inner-hits.asciidoc b/docs/reference/search/request/inner-hits.asciidoc index 345bc9abde2..3c8e0e9f00e 100644 --- a/docs/reference/search/request/inner-hits.asciidoc +++ b/docs/reference/search/request/inner-hits.asciidoc @@ -72,7 +72,7 @@ Inner hits also supports the following per document features: * <> * <> * <> -* <> +* <> * <> [[nested-inner-hits]] diff --git a/docs/reference/search/request/sort.asciidoc b/docs/reference/search/request/sort.asciidoc index d0a50f0ebfd..85c5d1e675f 100644 --- a/docs/reference/search/request/sort.asciidoc +++ b/docs/reference/search/request/sort.asciidoc @@ -15,7 +15,7 @@ PUT /my_index "my_type": { "properties": { "post_date": { "type": "date" }, - "user": { + "user": { "type": "keyword" }, "name": { @@ -26,8 +26,6 @@ PUT /my_index } } } - -GET /_cluster/health?wait_for_status=yellow -------------------------------------------------- // CONSOLE @@ -100,7 +98,7 @@ PUT /my_index/my_type/1?refresh "price": [20, 4] } -POST /_search +POST /_search { "query" : { "term" : { "product" : "chocolate" } diff --git a/docs/reference/search/request/fields.asciidoc b/docs/reference/search/request/stored-fields.asciidoc similarity index 91% rename from docs/reference/search/request/fields.asciidoc rename to docs/reference/search/request/stored-fields.asciidoc index 3483d470ee2..3d5b8c01b47 100644 --- a/docs/reference/search/request/fields.asciidoc +++ b/docs/reference/search/request/stored-fields.asciidoc @@ -1,7 +1,7 @@ [[search-request-fields]] === Fields -WARNING: The `fields` parameter is about fields that are explicitly marked as +WARNING: The `stored_fields` parameter is about fields that are explicitly marked as stored in the mapping, which is off by default and generally not recommended. Use <> instead to select subsets of the original source document to be returned. @@ -13,7 +13,7 @@ by a search hit. -------------------------------------------------- GET /_search { - "fields" : ["user", "postDate"], + "stored_fields" : ["user", "postDate"], "query" : { "term" : { "user" : "kimchy" } } @@ -30,7 +30,7 @@ returned, for example: -------------------------------------------------- GET /_search { - "fields" : [], + "stored_fields" : [], "query" : { "term" : { "user" : "kimchy" } } diff --git a/docs/reference/search/uri-request.asciidoc b/docs/reference/search/uri-request.asciidoc index 496f04ea4af..ba36992f6fb 100644 --- a/docs/reference/search/uri-request.asciidoc +++ b/docs/reference/search/uri-request.asciidoc @@ -83,7 +83,7 @@ hits was computed. part of the document by using `_source_include` & `_source_exclude` (see the <> documentation for more details) -|`fields` |The selective stored fields of the document to return for each hit, +|`stored_fields` |The selective stored fields of the document to return for each hit, comma delimited. Not specifying any value will cause no fields to return. |`sort` |Sorting to perform. 
Can either be in the form of `fieldName`, or diff --git a/docs/reference/setup.asciidoc b/docs/reference/setup.asciidoc index a883b0cc543..ae3c5b3beb4 100644 --- a/docs/reference/setup.asciidoc +++ b/docs/reference/setup.asciidoc @@ -47,3 +47,5 @@ include::setup/bootstrap-checks.asciidoc[] include::setup/sysconfig.asciidoc[] include::setup/upgrade.asciidoc[] + +include::setup/stopping.asciidoc[] diff --git a/docs/reference/setup/configuration.asciidoc b/docs/reference/setup/configuration.asciidoc index 52bf5ffcbff..68f73fc96b8 100644 --- a/docs/reference/setup/configuration.asciidoc +++ b/docs/reference/setup/configuration.asciidoc @@ -26,7 +26,7 @@ setting, as follows: [source,sh] ------------------------------- -./bin/elasticsearch -Ees.path.conf=/path/to/my/config/ +./bin/elasticsearch -Epath.conf=/path/to/my/config/ ------------------------------- [float] diff --git a/docs/reference/setup/stopping.asciidoc b/docs/reference/setup/stopping.asciidoc new file mode 100644 index 00000000000..45a3b122a90 --- /dev/null +++ b/docs/reference/setup/stopping.asciidoc @@ -0,0 +1,58 @@ +[[stopping-elasticsearch]] +== Stopping Elasticsearch + +An orderly shutdown of Elasticsearch ensures that Elasticsearch has a chance to clean up and close +outstanding resources. For example, a node that is shut down in an orderly fashion will remove itself +from the cluster, sync translogs to disk, and perform other related cleanup activities. You can help +ensure an orderly shutdown by properly stopping Elasticsearch. + +If you're running Elasticsearch as a service, you can stop Elasticsearch via the service management +functionality provided by your installation. + +If you're running Elasticsearch directly, you can stop Elasticsearch by sending control-C if you're +running Elasticsearch in the console, or by sending `SIGTERM` to the Elasticsearch process on a +POSIX system. You can obtain the PID to send the signal to via various tools (e.g., `ps` or `jps`): + +[source,sh] +-------------------------------------------------- +$ jps | grep Elasticsearch +14542 Elasticsearch +-------------------------------------------------- + +From the Elasticsearch startup logs: + +[source,sh] +-------------------------------------------------- +[2016-07-07 12:26:18,908][INFO ][node ] [Reaper] version[5.0.0-alpha4], pid[15399], build[3f5b994/2016-06-27T16:23:46.861Z], OS[Mac OS X/10.11.5/x86_64], JVM[Oracle Corporation/Java HotSpot(TM) 64-Bit Server VM/1.8.0_92/25.92-b14] +-------------------------------------------------- + +Or by specifying a location to write a PID file to on startup (`-p `): + +[source,sh] +-------------------------------------------------- +$ ./bin/elasticsearch -p /tmp/elasticsearch-pid -d +$ cat /tmp/elasticsearch-pid && echo +15516 +$ kill -SIGTERM 15516 +-------------------------------------------------- + +[[fatal-errors]] +[float] +=== Stopping on Fatal Errors + +During the life of the Elasticsearch virtual machine, certain fatal errors could arise that put the +virtual machine in a questionable state. Such fatal errors include out of memory errors, internal +errors in the virtual machine, and serious I/O errors. + +When Elasticsearch detects that the virtual machine has encountered such a fatal error, Elasticsearch +will attempt to log the error and then will halt the virtual machine. When Elasticsearch initiates +such a shutdown, it does not go through an orderly shutdown as described above. The Elasticsearch +process will also return with a special status code indicating the nature of the error.
+ +[horizontal] +JVM internal error:: 128 +Out of memory error:: 127 +Stack overflow error:: 126 +Unknown virtual machine error:: 125 +Serious I/O error:: 124 +Unknown fatal error:: 1 diff --git a/docs/resiliency/index.asciidoc b/docs/resiliency/index.asciidoc index 6f3ed169709..9f8a22c7337 100644 --- a/docs/resiliency/index.asciidoc +++ b/docs/resiliency/index.asciidoc @@ -55,6 +55,14 @@ If you encounter an issue, https://github.com/elastic/elasticsearch/issues[pleas We are committed to tracking down and fixing all the issues that are posted. +[float] +==== Jepsen Tests + +The Jepsen platform is specifically designed to test distributed systems. It is not a single test and is regularly adapted +to create new scenarios. We have currently ported all published Jepsen scenarios that deal with loss of acknowledged writes to our testing +framework. As the Jepsen tests evolve, we will continue porting new scenarios that are not covered yet. We are committed to investigating +all new scenarios and will report issues that we find on this page and in our GitHub repository. + [float] === Better request retry mechanism when nodes are disconnected (STATUS: ONGOING) @@ -94,6 +102,19 @@ space. The following issues have been identified: Other safeguards are tracked in the meta-issue {GIT}11511[#11511]. +[float] +=== The _version field may not uniquely identify document content during a network partition (STATUS: ONGOING) + +When a primary has been partitioned away from the cluster there is a short period of time until it detects this. During that time it will continue +indexing writes locally, thereby updating document versions. When it tries to replicate the operation, however, it will discover that it is +partitioned away. It won't acknowledge the write and will wait until the partition is resolved to negotiate with the master on how to proceed. +The master will decide to either fail any replicas which failed to index the operations on the primary or tell the primary that it has to +step down because a new primary has been chosen in the meantime. Since the old primary has already written documents, clients may already have read from +the old primary before it shuts itself down. The version numbers of these reads may not be unique if the new primary has already accepted +writes for the same document (see {GIT}19269[#19269]). + +We are currently implementing Sequence numbers {GIT}10708[#10708] which better track primary changes. Sequence numbers thus provide a basis +for uniquely identifying writes even in the presence of network partitions and will replace `_version` in operations that require this. [float] === Relocating shards omitted by reporting infrastructure (STATUS: ONGOING) @@ -102,17 +123,33 @@ Indices stats and indices segments requests reach out to all nodes that have sha while the stats request arrives will make that part of the request fail and are just ignored in the overall stats result. {GIT}13719[#13719] [float] -=== Jepsen Test Failures (STATUS: ONGOING) +=== Documentation of guarantees and handling of failures (STATUS: ONGOING) -We have increased our test coverage to include scenarios tested by Jepsen. We make heavy use of randomization to expand on the scenarios that can be tested and to introduce new error conditions. 
You can follow the work on the master branch of the https://github.com/elastic/elasticsearch/blob/master/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java[`DiscoveryWithServiceDisruptionsIT` class], where we will add more tests as time progresses. +This status page is a start, but we can do a better job of explicitly documenting the processes at work in Elasticsearch and what happens +in the case of each type of failure. The plan is to have a test case that validates each behavior under simulated conditions. Every test + will document the expected results, the associated test code, and an explicit PASS or FAIL status for each simulated case. [float] -=== Document guarantees and handling of failure (STATUS: ONGOING) +=== Run Jepsen (STATUS: ONGOING) + +We have ported the known scenarios in the Jepsen blogs that check loss of acknowledged writes to our testing infrastructure. +The new tests are run continuously in our testing farm and are passing. We are also working on running Jepsen independently to verify +that no failures are found. -This status page is a start, but we can do a better job of explicitly documenting the processes at work in Elasticsearch, and what happens in the case of each type of failure. The plan is to have a test case that validates each behavior under simulated conditions. Every test will document the expected results, the associated test code and an explicit PASS or FAIL status for each simulated case. == Unreleased +[float] +=== Port Jepsen tests dealing with loss of acknowledged writes to our testing framework (STATUS: UNRELEASED, V5.0.0) + +We have increased our test coverage to include scenarios tested by Jepsen that demonstrate loss of acknowledged writes, as described in +the Elasticsearch related blogs. We make heavy use of randomization to expand on the scenarios that can be tested and to introduce +new error conditions. +You can follow the work on the master branch of the +https://github.com/elastic/elasticsearch/blob/master/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java[`DiscoveryWithServiceDisruptionsIT` class], +where the `testAckedIndexing` test was specifically added to check that we don't lose acknowledged writes in various failure scenarios. 
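The shape of such a disruption test can be sketched as follows; this is an illustrative reduction, not the actual `testAckedIndexing` source, and the index name and document count are made up:

[source,java]
----
import java.util.ArrayList;
import java.util.List;

import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.test.ESIntegTestCase;

public class AckedWritesSketchIT extends ESIntegTestCase {
    public void testAckedWritesAreNotLost() throws Exception {
        List<String> ackedIds = new ArrayList<>();
        for (int i = 0; i < 100; i++) {
            // Only writes that return a response (i.e. were acknowledged) are
            // recorded; in the real test a network disruption runs concurrently.
            IndexResponse response = client().prepareIndex("test", "doc", Integer.toString(i))
                    .setSource("field", i)
                    .get();
            ackedIds.add(response.getId());
        }
        // ... heal the simulated disruption, then verify nothing acked was lost ...
        refresh();
        for (String id : ackedIds) {
            assertTrue("acknowledged write lost: " + id,
                    client().prepareGet("test", "doc", id).get().isExists());
        }
    }
}
----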
+ + [float] === Loss of documents during network partition (STATUS: UNRELEASED, v5.0.0) diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/MatrixAggregationPlugin.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/MatrixAggregationPlugin.java index 077c3d3d31a..0b636b7abe2 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/MatrixAggregationPlugin.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/MatrixAggregationPlugin.java @@ -21,17 +21,14 @@ package org.elasticsearch.search.aggregations.matrix; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.SearchModule; +import org.elasticsearch.search.SearchModule.AggregationSpec; import org.elasticsearch.search.aggregations.matrix.stats.InternalMatrixStats; import org.elasticsearch.search.aggregations.matrix.stats.MatrixStatsAggregationBuilder; import org.elasticsearch.search.aggregations.matrix.stats.MatrixStatsParser; -import java.io.IOException; - public class MatrixAggregationPlugin extends Plugin { - public void onModule(SearchModule searchModule) { - InternalMatrixStats.registerStreams(); - searchModule.registerAggregation(MatrixStatsAggregationBuilder::new, new MatrixStatsParser(), - MatrixStatsAggregationBuilder.AGGREGATION_NAME_FIELD); + searchModule.registerAggregation(new AggregationSpec(MatrixStatsAggregationBuilder::new, new MatrixStatsParser(), + MatrixStatsAggregationBuilder.AGGREGATION_NAME_FIELD).addResultReader(InternalMatrixStats::new)); } } diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/InternalMatrixStats.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/InternalMatrixStats.java index edef75389c8..d97fb64a16d 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/InternalMatrixStats.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/InternalMatrixStats.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.aggregations.matrix.stats; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.aggregations.AggregationStreams; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.metrics.InternalMetricsAggregation; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; @@ -31,32 +30,16 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; +import static java.util.Collections.emptyMap; + /** * Computes distribution statistics over multiple fields */ public class InternalMatrixStats extends InternalMetricsAggregation implements MatrixStats { - - public final static Type TYPE = new Type("matrix_stats"); - public final static AggregationStreams.Stream STREAM = new AggregationStreams.Stream() { - @Override - public InternalMatrixStats readResult(StreamInput in) throws IOException { - InternalMatrixStats result = new InternalMatrixStats(); - result.readFrom(in); - return result; - } - }; - - public static void registerStreams() { - AggregationStreams.registerStream(STREAM, TYPE.stream()); - } - /** per shard stats needed to compute stats */ - protected RunningStats stats; + private final 
RunningStats stats; /** final result */ - protected MatrixStatsResults results; - - protected InternalMatrixStats() { - } + private final MatrixStatsResults results; /** per shard ctor */ protected InternalMatrixStats(String name, long count, RunningStats multiFieldStatsResults, MatrixStatsResults results, @@ -67,9 +50,24 @@ public class InternalMatrixStats extends InternalMetricsAggregation implements M this.results = results; } + /** + * Read from a stream. + */ + public InternalMatrixStats(StreamInput in) throws IOException { + super(in); + stats = in.readOptionalWriteable(RunningStats::new); + results = in.readOptionalWriteable(MatrixStatsResults::new); + } + @Override - public Type type() { - return TYPE; + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeOptionalWriteable(stats); + out.writeOptionalWriteable(results); + } + + @Override + public String getWriteableName() { + return MatrixStatsAggregationBuilder.NAME; } /** get the number of documents */ @@ -197,7 +195,7 @@ public class InternalMatrixStats extends InternalMetricsAggregation implements M } else if (path.size() == 1) { String element = path.get(0); if (results == null) { - results = new MatrixStatsResults(); + return emptyMap(); } switch (element) { case "counts": @@ -222,22 +220,6 @@ public class InternalMatrixStats extends InternalMetricsAggregation implements M } } - @Override - protected void doWriteTo(StreamOutput out) throws IOException { - // write running stats - out.writeOptionalWriteable(stats); - // write results - out.writeOptionalWriteable(results); - } - - @Override - protected void doReadFrom(StreamInput in) throws IOException { - // read stats count - stats = in.readOptionalWriteable(RunningStats::new); - // read count - results = in.readOptionalWriteable(MatrixStatsResults::new); - } - @Override public InternalAggregation doReduce(List aggregations, ReduceContext reduceContext) { // merge stats across all shards diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregationBuilder.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregationBuilder.java index 5c6c3767177..e32c7d3ad73 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregationBuilder.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregationBuilder.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.search.MultiValueMode; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.AggregatorFactory; +import org.elasticsearch.search.aggregations.InternalAggregation.Type; import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.MultiValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSource; @@ -37,24 +38,23 @@ import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; import java.io.IOException; import java.util.Map; -/** - */ public class MatrixStatsAggregationBuilder extends MultiValuesSourceAggregationBuilder.LeafOnly { - public static final String NAME = InternalMatrixStats.TYPE.name(); + public static final String NAME = "matrix_stats"; + public static final Type TYPE = new Type(NAME); public static final ParseField 
AGGREGATION_NAME_FIELD = new ParseField(NAME); private MultiValueMode multiValueMode = MultiValueMode.AVG; public MatrixStatsAggregationBuilder(String name) { - super(name, InternalMatrixStats.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC); + super(name, TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC); } /** * Read from a stream. */ public MatrixStatsAggregationBuilder(StreamInput in) throws IOException { - super(in, InternalMatrixStats.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC); + super(in, TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC); } @Override diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsResults.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsResults.java index 96b7b74ab4e..1ae29e65761 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsResults.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsResults.java @@ -34,9 +34,9 @@ import java.util.Map; */ class MatrixStatsResults implements Writeable { /** object holding results - computes results in place */ - final protected RunningStats results; + protected final RunningStats results; /** pearson product correlation coefficients */ - final protected Map> correlation; + protected final Map> correlation; /** Base ctor */ public MatrixStatsResults() { diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceAggregationBuilder.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceAggregationBuilder.java index 51e5ce1cf27..e3aa171fe3d 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceAggregationBuilder.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceAggregationBuilder.java @@ -52,7 +52,7 @@ public abstract class MultiValuesSourceAggregationBuilder> + public abstract static class LeafOnly> extends MultiValuesSourceAggregationBuilder { protected LeafOnly(String name, Type type, ValuesSourceType valuesSourceType, ValueType targetValueType) { diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceParser.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceParser.java index dd2b69696f3..0de5e13c058 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceParser.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/support/MultiValuesSourceParser.java @@ -165,8 +165,8 @@ public abstract class MultiValuesSourceParser implement return factory; } - private final void parseMissingAndAdd(final String aggregationName, final String currentFieldName, - XContentParser parser, final Map missing) throws IOException { + private void parseMissingAndAdd(final String aggregationName, final String currentFieldName, + XContentParser parser, final Map missing) throws IOException { XContentParser.Token token = parser.currentToken(); if (token == null) { token = parser.nextToken(); diff --git a/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/BaseMatrixStatsTestCase.java 
b/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/BaseMatrixStatsTestCase.java index b1296bb1146..2e4fa4313bd 100644 --- a/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/BaseMatrixStatsTestCase.java +++ b/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/BaseMatrixStatsTestCase.java @@ -34,8 +34,8 @@ public abstract class BaseMatrixStatsTestCase extends ESTestCase { protected final ArrayList fieldA = new ArrayList<>(numObs); protected final ArrayList fieldB = new ArrayList<>(numObs); protected final MultiPassStats actualStats = new MultiPassStats(); - protected final static String fieldAKey = "fieldA"; - protected final static String fieldBKey = "fieldB"; + protected static final String fieldAKey = "fieldA"; + protected static final String fieldBKey = "fieldB"; @Before public void setup() { diff --git a/modules/build.gradle b/modules/build.gradle index 558163a420a..11131c28e23 100644 --- a/modules/build.gradle +++ b/modules/build.gradle @@ -18,6 +18,7 @@ */ subprojects { + group = 'org.elasticsearch.plugin' // for modules which publish client jars apply plugin: 'elasticsearch.esplugin' esplugin { diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/AbstractStringProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/AbstractStringProcessor.java index d9ffd34cac1..079ff73846a 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/AbstractStringProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/AbstractStringProcessor.java @@ -20,9 +20,9 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; import java.util.Map; @@ -53,7 +53,7 @@ abstract class AbstractStringProcessor extends AbstractProcessor { protected abstract String process(String value); - static abstract class Factory extends AbstractProcessorFactory { + abstract static class Factory implements Processor.Factory { protected final String processorType; protected Factory(String processorType) { @@ -61,11 +61,12 @@ abstract class AbstractStringProcessor extends AbstractProcessor { } @Override - public T doCreate(String processorTag, Map config) throws Exception { - String field = ConfigurationUtils.readStringProperty(processorType, processorTag, config, "field"); - return newProcessor(processorTag, field); + public AbstractStringProcessor create(Map registry, String tag, + Map config) throws Exception { + String field = ConfigurationUtils.readStringProperty(processorType, tag, config, "field"); + return newProcessor(tag, field); } - protected abstract T newProcessor(String processorTag, String field); + protected abstract AbstractStringProcessor newProcessor(String processorTag, String field); } } diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/AppendProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/AppendProcessor.java index af163c3c187..85cb8acbc06 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/AppendProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/AppendProcessor.java @@ -20,9 +20,10 @@ package 
org.elasticsearch.ingest.common; import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.AbstractProcessorFactory; +import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; import org.elasticsearch.ingest.TemplateService; import org.elasticsearch.ingest.ValueSource; @@ -64,7 +65,7 @@ public final class AppendProcessor extends AbstractProcessor { return TYPE; } - public static final class Factory extends AbstractProcessorFactory { + public static final class Factory implements Processor.Factory { private final TemplateService templateService; @@ -73,9 +74,12 @@ public final class AppendProcessor extends AbstractProcessor { } @Override - public AppendProcessor doCreate(String processorTag, Map config) throws Exception { + public AppendProcessor create(Map registry, String processorTag, + Map config) throws Exception { String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field"); Object value = ConfigurationUtils.readObject(TYPE, processorTag, config, "value"); + TemplateService.Template compiledTemplate = ConfigurationUtils.compileTemplate(TYPE, processorTag, + "field", field, templateService); return new AppendProcessor(processorTag, templateService.compile(field), ValueSource.wrap(value, templateService)); } } diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ConvertProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ConvertProcessor.java index 015c56c72c3..c85e8d17a00 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ConvertProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ConvertProcessor.java @@ -20,9 +20,9 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; import java.util.ArrayList; import java.util.List; @@ -93,7 +93,7 @@ public final class ConvertProcessor extends AbstractProcessor { }; @Override - public final String toString() { + public String toString() { return name().toLowerCase(Locale.ROOT); } @@ -160,9 +160,10 @@ public final class ConvertProcessor extends AbstractProcessor { return TYPE; } - public static final class Factory extends AbstractProcessorFactory { + public static final class Factory implements Processor.Factory { @Override - public ConvertProcessor doCreate(String processorTag, Map config) throws Exception { + public ConvertProcessor create(Map registry, String processorTag, + Map config) throws Exception { String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field"); String typeProperty = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "type"); String targetField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "target_field", field); diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateIndexNameProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateIndexNameProcessor.java index a94d4d048a8..6ed5f0b66cf 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateIndexNameProcessor.java +++ 
b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateIndexNameProcessor.java @@ -19,16 +19,6 @@ package org.elasticsearch.ingest.common; -import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.AbstractProcessorFactory; -import org.elasticsearch.ingest.ConfigurationUtils; -import org.elasticsearch.ingest.IngestDocument; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.joda.time.format.DateTimeFormat; -import org.joda.time.format.DateTimeFormatter; - import java.util.ArrayList; import java.util.Collections; import java.util.IllformedLocaleException; @@ -37,6 +27,16 @@ import java.util.Locale; import java.util.Map; import java.util.function.Function; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.ingest.AbstractProcessor; +import org.elasticsearch.ingest.ConfigurationUtils; +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; +import org.joda.time.format.DateTimeFormat; +import org.joda.time.format.DateTimeFormatter; + public final class DateIndexNameProcessor extends AbstractProcessor { public static final String TYPE = "date_index_name"; @@ -120,10 +120,11 @@ public final class DateIndexNameProcessor extends AbstractProcessor { return dateFormats; } - public static final class Factory extends AbstractProcessorFactory { + public static final class Factory implements Processor.Factory { @Override - protected DateIndexNameProcessor doCreate(String tag, Map config) throws Exception { + public DateIndexNameProcessor create(Map registry, String tag, + Map config) throws Exception { String localeString = ConfigurationUtils.readOptionalStringProperty(TYPE, tag, config, "locale"); String timezoneString = ConfigurationUtils.readOptionalStringProperty(TYPE, tag, config, "timezone"); DateTimeZone timezone = timezoneString == null ? 
DateTimeZone.UTC : DateTimeZone.forID(timezoneString); diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateProcessor.java index b82b9c8b76c..676cf2e7419 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateProcessor.java @@ -21,9 +21,9 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.format.ISODateTimeFormat; @@ -108,10 +108,11 @@ public final class DateProcessor extends AbstractProcessor { return formats; } - public static final class Factory extends AbstractProcessorFactory { + public static final class Factory implements Processor.Factory { @SuppressWarnings("unchecked") - public DateProcessor doCreate(String processorTag, Map config) throws Exception { + public DateProcessor create(Map registry, String processorTag, + Map config) throws Exception { String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field"); String targetField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "target_field", DEFAULT_TARGET_FIELD); String timezoneString = ConfigurationUtils.readOptionalStringProperty(TYPE, processorTag, config, "timezone"); diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/FailProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/FailProcessor.java index 6c434d85d5a..7dbdedaca08 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/FailProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/FailProcessor.java @@ -20,9 +20,9 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; import org.elasticsearch.ingest.TemplateService; import java.util.Map; @@ -56,7 +56,7 @@ public final class FailProcessor extends AbstractProcessor { return TYPE; } - public static final class Factory extends AbstractProcessorFactory { + public static final class Factory implements Processor.Factory { private final TemplateService templateService; @@ -65,9 +65,12 @@ public final class FailProcessor extends AbstractProcessor { } @Override - public FailProcessor doCreate(String processorTag, Map config) throws Exception { + public FailProcessor create(Map registry, String processorTag, + Map config) throws Exception { String message = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "message"); - return new FailProcessor(processorTag, templateService.compile(message)); + TemplateService.Template compiledTemplate = ConfigurationUtils.compileTemplate(TYPE, processorTag, + "message", message, templateService); + return new FailProcessor(processorTag, compiledTemplate); } } } diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ForEachProcessor.java 
b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ForEachProcessor.java index b6d14d1b8c5..c4640733d06 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ForEachProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ForEachProcessor.java @@ -20,19 +20,22 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Processor; -import org.elasticsearch.ingest.ProcessorsRegistry; +import org.elasticsearch.ingest.Processor; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; +import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Set; +import static org.elasticsearch.ingest.ConfigurationUtils.newConfigurationException; import static org.elasticsearch.ingest.ConfigurationUtils.readList; +import static org.elasticsearch.ingest.ConfigurationUtils.readMap; import static org.elasticsearch.ingest.ConfigurationUtils.readStringProperty; /** @@ -46,12 +49,12 @@ public final class ForEachProcessor extends AbstractProcessor { public static final String TYPE = "foreach"; private final String field; - private final List processors; + private final Processor processor; - ForEachProcessor(String tag, String field, List processors) { + ForEachProcessor(String tag, String field, Processor processor) { super(tag); this.field = field; - this.processors = processors; + this.processor = processor; } @Override @@ -62,9 +65,7 @@ public final class ForEachProcessor extends AbstractProcessor { Map innerSource = new HashMap<>(ingestDocument.getSourceAndMetadata()); innerSource.put("_value", value); // scalar value to access the list item being evaluated IngestDocument innerIngestDocument = new IngestDocument(innerSource, ingestDocument.getIngestMetadata()); - for (Processor processor : processors) { - processor.execute(innerIngestDocument); - } + processor.execute(innerIngestDocument); newValues.add(innerSource.get("_value")); } ingestDocument.setFieldValue(field, newValues); @@ -79,24 +80,23 @@ public final class ForEachProcessor extends AbstractProcessor { return field; } - List getProcessors() { - return processors; + Processor getProcessor() { + return processor; } - public static final class Factory extends AbstractProcessorFactory { - - private final ProcessorsRegistry processorRegistry; - - public Factory(ProcessorsRegistry processorRegistry) { - this.processorRegistry = processorRegistry; - } - + public static final class Factory implements Processor.Factory { @Override - protected ForEachProcessor doCreate(String tag, Map config) throws Exception { + public ForEachProcessor create(Map factories, String tag, + Map config) throws Exception { String field = readStringProperty(TYPE, tag, config, "field"); - List>> processorConfigs = readList(TYPE, tag, config, "processors"); - List processors = ConfigurationUtils.readProcessorConfigs(processorConfigs, processorRegistry); - return new ForEachProcessor(tag, field, Collections.unmodifiableList(processors)); + Map> processorConfig = readMap(TYPE, tag, config, "processor"); + Set>> entries = processorConfig.entrySet(); + if (entries.size() != 1) { + throw newConfigurationException(TYPE, tag, "processor", "Must specify exactly one processor type"); + } + Map.Entry> entry = entries.iterator().next(); + 
Processor processor = ConfigurationUtils.readProcessor(factories, entry.getKey(), entry.getValue());
+            return new ForEachProcessor(tag, field, processor);
         }
     }
 }
diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessor.java
index aa9fbb905cc..44528bdac82 100644
--- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessor.java
+++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessor.java
@@ -20,9 +20,9 @@
 package org.elasticsearch.ingest.common;
 
 import org.elasticsearch.ingest.AbstractProcessor;
-import org.elasticsearch.ingest.AbstractProcessorFactory;
 import org.elasticsearch.ingest.ConfigurationUtils;
 import org.elasticsearch.ingest.IngestDocument;
+import org.elasticsearch.ingest.Processor;
 
 import java.util.HashMap;
 import java.util.List;
@@ -114,7 +114,7 @@ public final class GrokProcessor extends AbstractProcessor {
         return combinedPattern;
     }
 
-    public final static class Factory extends AbstractProcessorFactory<GrokProcessor> {
+    public static final class Factory implements Processor.Factory {
 
         private final Map<String, String> builtinPatterns;
@@ -123,7 +123,8 @@
         }
 
         @Override
-        public GrokProcessor doCreate(String processorTag, Map<String, Object> config) throws Exception {
+        public GrokProcessor create(Map<String, Processor.Factory> registry, String processorTag,
+                                    Map<String, Object> config) throws Exception {
             String matchField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field");
             List<String> matchPatterns = ConfigurationUtils.readList(TYPE, processorTag, config, "patterns");
             boolean traceMatch = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "trace_match", false);
diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GsubProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GsubProcessor.java
index 72bc9e76710..36bbfaf7142 100644
--- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GsubProcessor.java
+++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GsubProcessor.java
@@ -20,8 +20,8 @@
 package org.elasticsearch.ingest.common;
 
 import org.elasticsearch.ingest.AbstractProcessor;
-import org.elasticsearch.ingest.AbstractProcessorFactory;
 import org.elasticsearch.ingest.IngestDocument;
+import org.elasticsearch.ingest.Processor;
 
 import java.util.Map;
 import java.util.regex.Matcher;
@@ -78,9 +78,10 @@ public final class GsubProcessor extends AbstractProcessor {
         return TYPE;
     }
 
-    public static final class Factory extends AbstractProcessorFactory<GsubProcessor> {
+    public static final class Factory implements Processor.Factory {
         @Override
-        public GsubProcessor doCreate(String processorTag, Map<String, Object> config) throws Exception {
+        public GsubProcessor create(Map<String, Processor.Factory> registry, String processorTag,
+                                    Map<String, Object> config) throws Exception {
             String field = readStringProperty(TYPE, processorTag, config, "field");
             String pattern = readStringProperty(TYPE, processorTag, config, "pattern");
             String replacement = readStringProperty(TYPE, processorTag, config, "replacement");
diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java
index 60bfdd37a9f..c89f6164de7 100644
--- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java
+++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java
@@ -19,9 +19,6 @@
 package org.elasticsearch.ingest.common;
 
-import org.elasticsearch.node.NodeModule;
-import org.elasticsearch.plugins.Plugin;
-
 import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStream;
@@ -31,9 +28,11 @@
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 
-public class IngestCommonPlugin extends Plugin {
+import org.elasticsearch.ingest.Processor;
+import org.elasticsearch.plugins.IngestPlugin;
+import org.elasticsearch.plugins.Plugin;
 
-    public static final String NAME = "ingest-common";
+public class IngestCommonPlugin extends Plugin implements IngestPlugin {
 
     private final Map<String, String> builtinPatterns;
@@ -41,26 +40,28 @@ public class IngestCommonPlugin extends Plugin {
         this.builtinPatterns = loadBuiltinPatterns();
     }
 
-    public void onModule(NodeModule nodeModule) {
-        nodeModule.registerProcessor(DateProcessor.TYPE, (registry) -> new DateProcessor.Factory());
-        nodeModule.registerProcessor(SetProcessor.TYPE, (registry) -> new SetProcessor.Factory(registry.getTemplateService()));
-        nodeModule.registerProcessor(AppendProcessor.TYPE, (registry) -> new AppendProcessor.Factory(registry.getTemplateService()));
-        nodeModule.registerProcessor(RenameProcessor.TYPE, (registry) -> new RenameProcessor.Factory());
-        nodeModule.registerProcessor(RemoveProcessor.TYPE, (registry) -> new RemoveProcessor.Factory(registry.getTemplateService()));
-        nodeModule.registerProcessor(SplitProcessor.TYPE, (registry) -> new SplitProcessor.Factory());
-        nodeModule.registerProcessor(JoinProcessor.TYPE, (registry) -> new JoinProcessor.Factory());
-        nodeModule.registerProcessor(UppercaseProcessor.TYPE, (registry) -> new UppercaseProcessor.Factory());
-        nodeModule.registerProcessor(LowercaseProcessor.TYPE, (registry) -> new LowercaseProcessor.Factory());
-        nodeModule.registerProcessor(TrimProcessor.TYPE, (registry) -> new TrimProcessor.Factory());
-        nodeModule.registerProcessor(ConvertProcessor.TYPE, (registry) -> new ConvertProcessor.Factory());
-        nodeModule.registerProcessor(GsubProcessor.TYPE, (registry) -> new GsubProcessor.Factory());
-        nodeModule.registerProcessor(FailProcessor.TYPE, (registry) -> new FailProcessor.Factory(registry.getTemplateService()));
-        nodeModule.registerProcessor(ForEachProcessor.TYPE, (registry) -> new ForEachProcessor.Factory(registry));
-        nodeModule.registerProcessor(DateIndexNameProcessor.TYPE, (registry) -> new DateIndexNameProcessor.Factory());
-        nodeModule.registerProcessor(SortProcessor.TYPE, (registry) -> new SortProcessor.Factory());
-        nodeModule.registerProcessor(GrokProcessor.TYPE, (registry) -> new GrokProcessor.Factory(builtinPatterns));
-        nodeModule.registerProcessor(ScriptProcessor.TYPE, (registry) ->
-            new ScriptProcessor.Factory(registry.getScriptService()));
+    @Override
+    public Map<String, Processor.Factory> getProcessors(Processor.Parameters parameters) {
+        Map<String, Processor.Factory> processors = new HashMap<>();
+        processors.put(DateProcessor.TYPE, new DateProcessor.Factory());
+        processors.put(SetProcessor.TYPE, new SetProcessor.Factory(parameters.templateService));
+        processors.put(AppendProcessor.TYPE, new AppendProcessor.Factory(parameters.templateService));
+        processors.put(RenameProcessor.TYPE, new RenameProcessor.Factory());
+        processors.put(RemoveProcessor.TYPE, new RemoveProcessor.Factory(parameters.templateService));
+        processors.put(SplitProcessor.TYPE, new SplitProcessor.Factory());
+        processors.put(JoinProcessor.TYPE, new JoinProcessor.Factory());
+        processors.put(UppercaseProcessor.TYPE, new UppercaseProcessor.Factory());
+        processors.put(LowercaseProcessor.TYPE, new LowercaseProcessor.Factory());
+        processors.put(TrimProcessor.TYPE, new TrimProcessor.Factory());
+        processors.put(ConvertProcessor.TYPE, new ConvertProcessor.Factory());
+        processors.put(GsubProcessor.TYPE, new GsubProcessor.Factory());
+        processors.put(FailProcessor.TYPE, new FailProcessor.Factory(parameters.templateService));
+        processors.put(ForEachProcessor.TYPE, new ForEachProcessor.Factory());
+        processors.put(DateIndexNameProcessor.TYPE, new DateIndexNameProcessor.Factory());
+        processors.put(SortProcessor.TYPE, new SortProcessor.Factory());
+        processors.put(GrokProcessor.TYPE, new GrokProcessor.Factory(builtinPatterns));
+        processors.put(ScriptProcessor.TYPE, new ScriptProcessor.Factory(parameters.scriptService));
+        return Collections.unmodifiableMap(processors);
     }
 
     // Code for loading built-in grok patterns packaged with the jar file:
diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/JoinProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/JoinProcessor.java
index 8114d20f28f..6b4327f726d 100644
--- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/JoinProcessor.java
+++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/JoinProcessor.java
@@ -20,9 +20,9 @@
 package org.elasticsearch.ingest.common;
 
 import org.elasticsearch.ingest.AbstractProcessor;
-import org.elasticsearch.ingest.AbstractProcessorFactory;
 import org.elasticsearch.ingest.ConfigurationUtils;
 import org.elasticsearch.ingest.IngestDocument;
+import org.elasticsearch.ingest.Processor;
 
 import java.util.List;
 import java.util.Map;
@@ -70,9 +70,10 @@ public final class JoinProcessor extends AbstractProcessor {
         return TYPE;
     }
 
-    public final static class Factory extends AbstractProcessorFactory<JoinProcessor> {
+    public static final class Factory implements Processor.Factory {
         @Override
-        public JoinProcessor doCreate(String processorTag, Map<String, Object> config) throws Exception {
+        public JoinProcessor create(Map<String, Processor.Factory> registry, String processorTag,
+                                    Map<String, Object> config) throws Exception {
             String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field");
             String separator = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "separator");
             return new JoinProcessor(processorTag, field, separator);
diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/LowercaseProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/LowercaseProcessor.java
index 9f8ea7a5614..a0ae8e13158 100644
--- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/LowercaseProcessor.java
+++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/LowercaseProcessor.java
@@ -44,7 +44,7 @@ public final class LowercaseProcessor extends AbstractStringProcessor {
         return TYPE;
     }
 
-    public final static class Factory extends AbstractStringProcessor.Factory<LowercaseProcessor> {
+    public static final class Factory extends AbstractStringProcessor.Factory {
 
         public Factory() {
             super(TYPE);
diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RemoveProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RemoveProcessor.java
index 98c4e18a408..b381eed723d 100644
--- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RemoveProcessor.java
+++ 
b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RemoveProcessor.java @@ -20,9 +20,9 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; import org.elasticsearch.ingest.TemplateService; import java.util.Map; @@ -55,7 +55,7 @@ public final class RemoveProcessor extends AbstractProcessor { return TYPE; } - public static final class Factory extends AbstractProcessorFactory { + public static final class Factory implements Processor.Factory { private final TemplateService templateService; @@ -64,9 +64,12 @@ public final class RemoveProcessor extends AbstractProcessor { } @Override - public RemoveProcessor doCreate(String processorTag, Map config) throws Exception { + public RemoveProcessor create(Map registry, String processorTag, + Map config) throws Exception { String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field"); - return new RemoveProcessor(processorTag, templateService.compile(field)); + TemplateService.Template compiledTemplate = ConfigurationUtils.compileTemplate(TYPE, processorTag, + "field", field, templateService); + return new RemoveProcessor(processorTag, compiledTemplate); } } } diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RenameProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RenameProcessor.java index 9143321c4aa..d6c655fd5ce 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RenameProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RenameProcessor.java @@ -20,9 +20,9 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; import java.util.Map; @@ -75,9 +75,10 @@ public final class RenameProcessor extends AbstractProcessor { return TYPE; } - public static final class Factory extends AbstractProcessorFactory { + public static final class Factory implements Processor.Factory { @Override - public RenameProcessor doCreate(String processorTag, Map config) throws Exception { + public RenameProcessor create(Map registry, String processorTag, + Map config) throws Exception { String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field"); String targetField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "target_field"); return new RenameProcessor(processorTag, field, targetField); diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java index e4881366165..4799d6a50fe 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java @@ -24,8 +24,8 @@ import java.util.Map; import org.elasticsearch.common.Strings; import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.IngestDocument; +import 
org.elasticsearch.ingest.Processor; import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.Script; @@ -77,7 +77,7 @@ public final class ScriptProcessor extends AbstractProcessor { return TYPE; } - public static final class Factory extends AbstractProcessorFactory { + public static final class Factory implements Processor.Factory { private final ScriptService scriptService; @@ -86,7 +86,8 @@ public final class ScriptProcessor extends AbstractProcessor { } @Override - public ScriptProcessor doCreate(String processorTag, Map config) throws Exception { + public ScriptProcessor create(Map registry, String processorTag, + Map config) throws Exception { String field = readOptionalStringProperty(TYPE, processorTag, config, "field"); String lang = readStringProperty(TYPE, processorTag, config, "lang"); String inline = readOptionalStringProperty(TYPE, processorTag, config, "inline"); diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/SetProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/SetProcessor.java index a78701645a9..1ee3ad0509f 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/SetProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/SetProcessor.java @@ -20,9 +20,9 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; import org.elasticsearch.ingest.TemplateService; import org.elasticsearch.ingest.ValueSource; @@ -75,7 +75,7 @@ public final class SetProcessor extends AbstractProcessor { return TYPE; } - public static final class Factory extends AbstractProcessorFactory { + public static final class Factory implements Processor.Factory { private final TemplateService templateService; @@ -84,13 +84,16 @@ public final class SetProcessor extends AbstractProcessor { } @Override - public SetProcessor doCreate(String processorTag, Map config) throws Exception { + public SetProcessor create(Map registry, String processorTag, + Map config) throws Exception { String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field"); Object value = ConfigurationUtils.readObject(TYPE, processorTag, config, "value"); boolean overrideEnabled = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "override", true); + TemplateService.Template compiledTemplate = ConfigurationUtils.compileTemplate(TYPE, processorTag, + "field", field, templateService); return new SetProcessor( processorTag, - templateService.compile(field), + compiledTemplate, ValueSource.wrap(value, templateService), overrideEnabled); } diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/SortProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/SortProcessor.java index 706a1cef9c5..411b22adef0 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/SortProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/SortProcessor.java @@ -20,9 +20,9 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.ConfigurationUtils; import 
org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; import java.util.Collections; import java.util.List; @@ -111,10 +111,11 @@ public final class SortProcessor extends AbstractProcessor { return TYPE; } - public final static class Factory extends AbstractProcessorFactory { + public static final class Factory implements Processor.Factory { @Override - public SortProcessor doCreate(String processorTag, Map config) throws Exception { + public SortProcessor create(Map registry, String processorTag, + Map config) throws Exception { String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, FIELD); try { SortOrder direction = SortOrder.fromString( diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/SplitProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/SplitProcessor.java index f7c5e8befc4..8ff841ef0e4 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/SplitProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/SplitProcessor.java @@ -20,9 +20,9 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; import java.util.ArrayList; import java.util.Collections; @@ -72,9 +72,10 @@ public final class SplitProcessor extends AbstractProcessor { return TYPE; } - public static class Factory extends AbstractProcessorFactory { + public static class Factory implements Processor.Factory { @Override - public SplitProcessor doCreate(String processorTag, Map config) throws Exception { + public SplitProcessor create(Map registry, String processorTag, + Map config) throws Exception { String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field"); return new SplitProcessor(processorTag, field, ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "separator")); } diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/TrimProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/TrimProcessor.java index a57a25125d6..e852f887da0 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/TrimProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/TrimProcessor.java @@ -41,7 +41,7 @@ public final class TrimProcessor extends AbstractStringProcessor { return TYPE; } - public static final class Factory extends AbstractStringProcessor.Factory { + public static final class Factory extends AbstractStringProcessor.Factory { public Factory() { super(TYPE); diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/UppercaseProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/UppercaseProcessor.java index a5c817352a1..5585a130eaf 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/UppercaseProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/UppercaseProcessor.java @@ -43,7 +43,7 @@ public final class UppercaseProcessor extends AbstractStringProcessor { return TYPE; } - public static final class Factory extends AbstractStringProcessor.Factory { + public static final class Factory extends AbstractStringProcessor.Factory { public Factory() { 
super(TYPE); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorFactoryTests.java index fbf77cc4285..e70bc3434ee 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorFactoryTests.java @@ -19,8 +19,8 @@ package org.elasticsearch.ingest.common; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.TestTemplateService; import org.elasticsearch.test.ESTestCase; import org.junit.Before; @@ -52,8 +52,7 @@ public class AppendProcessorFactoryTests extends ESTestCase { } config.put("value", value); String processorTag = randomAsciiOfLength(10); - config.put(AbstractProcessorFactory.TAG_KEY, processorTag); - AppendProcessor appendProcessor = factory.create(config); + AppendProcessor appendProcessor = factory.create(null, processorTag, config); assertThat(appendProcessor.getTag(), equalTo(processorTag)); assertThat(appendProcessor.getField().execute(Collections.emptyMap()), equalTo("field1")); assertThat(appendProcessor.getValue().copyAndResolve(Collections.emptyMap()), equalTo(value)); @@ -63,7 +62,7 @@ public class AppendProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("value", "value1"); try { - factory.create(config); + factory.create(null, null, config); fail("factory create should have failed"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[field] required property is missing")); @@ -74,7 +73,7 @@ public class AppendProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "field1"); try { - factory.create(config); + factory.create(null, null, config); fail("factory create should have failed"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[value] required property is missing")); @@ -86,10 +85,21 @@ public class AppendProcessorFactoryTests extends ESTestCase { config.put("field", "field1"); config.put("value", null); try { - factory.create(config); + factory.create(null, null, config); fail("factory create should have failed"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[value] required property is missing")); } } + + public void testInvalidMustacheTemplate() throws Exception { + AppendProcessor.Factory factory = new AppendProcessor.Factory(TestTemplateService.instance(true)); + Map config = new HashMap<>(); + config.put("field", "field1"); + config.put("value", "value1"); + String processorTag = randomAsciiOfLength(10); + ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> factory.create(null, processorTag, config)); + assertThat(exception.getMessage(), equalTo("java.lang.RuntimeException: could not compile script")); + assertThat(exception.getHeader("processor_tag").get(0), equalTo(processorTag)); + } } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ConvertProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ConvertProcessorFactoryTests.java index 1ec5362af14..bc04053f907 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ConvertProcessorFactoryTests.java +++ 
b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ConvertProcessorFactoryTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; @@ -39,8 +38,7 @@ public class ConvertProcessorFactoryTests extends ESTestCase { config.put("field", "field1"); config.put("type", type.toString()); String processorTag = randomAsciiOfLength(10); - config.put(AbstractProcessorFactory.TAG_KEY, processorTag); - ConvertProcessor convertProcessor = factory.create(config); + ConvertProcessor convertProcessor = factory.create(null, processorTag, config); assertThat(convertProcessor.getTag(), equalTo(processorTag)); assertThat(convertProcessor.getField(), equalTo("field1")); assertThat(convertProcessor.getTargetField(), equalTo("field1")); @@ -54,7 +52,7 @@ public class ConvertProcessorFactoryTests extends ESTestCase { config.put("field", "field1"); config.put("type", type); try { - factory.create(config); + factory.create(null, null, config); fail("factory create should have failed"); } catch (ElasticsearchParseException e) { assertThat(e.getMessage(), Matchers.equalTo("[type] type [" + type + "] not supported, cannot convert field.")); @@ -70,7 +68,7 @@ public class ConvertProcessorFactoryTests extends ESTestCase { String type = "type-" + randomAsciiOfLengthBetween(1, 10); config.put("type", type); try { - factory.create(config); + factory.create(null, null, config); fail("factory create should have failed"); } catch (ElasticsearchParseException e) { assertThat(e.getMessage(), Matchers.equalTo("[field] required property is missing")); @@ -82,7 +80,7 @@ public class ConvertProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "field1"); try { - factory.create(config); + factory.create(null, null, config); fail("factory create should have failed"); } catch (ElasticsearchParseException e) { assertThat(e.getMessage(), Matchers.equalTo("[type] required property is missing")); @@ -97,8 +95,7 @@ public class ConvertProcessorFactoryTests extends ESTestCase { config.put("target_field", "field2"); config.put("type", type.toString()); String processorTag = randomAsciiOfLength(10); - config.put(AbstractProcessorFactory.TAG_KEY, processorTag); - ConvertProcessor convertProcessor = factory.create(config); + ConvertProcessor convertProcessor = factory.create(null, processorTag, config); assertThat(convertProcessor.getTag(), equalTo(processorTag)); assertThat(convertProcessor.getField(), equalTo("field1")); assertThat(convertProcessor.getTargetField(), equalTo("field2")); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameFactoryTests.java index 42877236b88..3b9e2121c95 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameFactoryTests.java @@ -36,7 +36,7 @@ public class DateIndexNameFactoryTests extends ESTestCase { config.put("field", "_field"); config.put("date_rounding", "y"); - DateIndexNameProcessor processor = factory.create(config); + DateIndexNameProcessor processor = factory.create(null, null, config); assertThat(processor.getDateFormats().size(), Matchers.equalTo(1)); 
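The test rewrites running through this section all track one API change: a processor factory now implements Processor.Factory and receives the registry of all factories plus the processor tag as explicit arguments, instead of reading the tag out of the config map under TAG_KEY. A minimal sketch of the new contract as these hunks use it; the generic type parameters are assumptions, since angle-bracket contents were stripped from this copy of the diff:

    import java.util.Map;

    // Sketch only; the real types live in org.elasticsearch.ingest.
    public interface Processor {
        interface Factory {
            // registry: every registered factory by type name, so composite
            // processors such as foreach can build their inner processor;
            // processorTag: now passed explicitly, no longer read from config;
            // config: the raw processor configuration, consumed as it is read.
            Processor create(Map<String, Factory> registry, String processorTag,
                             Map<String, Object> config) throws Exception;
        }
    }

Tests exercising a factory that uses neither the registry nor the tag simply pass null for both, as in factory.create(null, null, config) below.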
assertThat(processor.getField(), Matchers.equalTo("_field")); assertThat(processor.getIndexNamePrefix(), Matchers.equalTo("")); @@ -53,7 +53,7 @@ public class DateIndexNameFactoryTests extends ESTestCase { config.put("date_rounding", "y"); config.put("date_formats", Arrays.asList("UNIX", "UNIX_MS")); - DateIndexNameProcessor processor = factory.create(config); + DateIndexNameProcessor processor = factory.create(null, null, config); assertThat(processor.getDateFormats().size(), Matchers.equalTo(2)); config = new HashMap<>(); @@ -62,7 +62,7 @@ public class DateIndexNameFactoryTests extends ESTestCase { config.put("date_rounding", "y"); config.put("index_name_format", "yyyyMMdd"); - processor = factory.create(config); + processor = factory.create(null, null, config); assertThat(processor.getIndexNameFormat(), Matchers.equalTo("yyyyMMdd")); config = new HashMap<>(); @@ -71,7 +71,7 @@ public class DateIndexNameFactoryTests extends ESTestCase { config.put("date_rounding", "y"); config.put("timezone", "+02:00"); - processor = factory.create(config); + processor = factory.create(null, null, config); assertThat(processor.getTimezone(), Matchers.equalTo(DateTimeZone.forOffsetHours(2))); config = new HashMap<>(); @@ -79,7 +79,7 @@ public class DateIndexNameFactoryTests extends ESTestCase { config.put("index_name_prefix", "_prefix"); config.put("date_rounding", "y"); - processor = factory.create(config); + processor = factory.create(null, null, config); assertThat(processor.getIndexNamePrefix(), Matchers.equalTo("_prefix")); } @@ -87,12 +87,12 @@ public class DateIndexNameFactoryTests extends ESTestCase { DateIndexNameProcessor.Factory factory = new DateIndexNameProcessor.Factory(); Map config = new HashMap<>(); config.put("date_rounding", "y"); - ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(config)); + ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, config)); assertThat(e.getMessage(), Matchers.equalTo("[field] required property is missing")); config.clear(); config.put("field", "_field"); - e = expectThrows(ElasticsearchParseException.class, () -> factory.create(config)); + e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, config)); assertThat(e.getMessage(), Matchers.equalTo("[date_rounding] required property is missing")); } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorFactoryTests.java index 65dcdf6082c..95ad68bb110 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorFactoryTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.test.ESTestCase; import org.joda.time.DateTimeZone; @@ -42,8 +41,7 @@ public class DateProcessorFactoryTests extends ESTestCase { config.put("field", sourceField); config.put("formats", Collections.singletonList("dd/MM/yyyyy")); String processorTag = randomAsciiOfLength(10); - config.put(AbstractProcessorFactory.TAG_KEY, processorTag); - DateProcessor processor = factory.create(config); + DateProcessor processor = factory.create(null, processorTag, config); 
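Stepping back from the individual factory tests: the IngestCommonPlugin hunk earlier in this diff shows the registration side moving from NodeModule.registerProcessor callbacks to the IngestPlugin extension point. A hypothetical third-party plugin would follow the same shape; MyProcessor is an invented name for illustration, while the plugin types and Parameters fields are the ones shown in the diff:

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    import org.elasticsearch.ingest.Processor;
    import org.elasticsearch.plugins.IngestPlugin;
    import org.elasticsearch.plugins.Plugin;

    // Hypothetical plugin mirroring the IngestCommonPlugin changes above.
    public class MyIngestPlugin extends Plugin implements IngestPlugin {

        @Override
        public Map<String, Processor.Factory> getProcessors(Processor.Parameters parameters) {
            Map<String, Processor.Factory> processors = new HashMap<>();
            // Factories needing node services take them from the Parameters
            // object, e.g. parameters.templateService or parameters.scriptService,
            // instead of pulling them from a registry callback.
            processors.put(MyProcessor.TYPE, new MyProcessor.Factory());
            return Collections.unmodifiableMap(processors);
        }
    }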
assertThat(processor.getTag(), equalTo(processorTag)); assertThat(processor.getField(), equalTo(sourceField)); assertThat(processor.getTargetField(), equalTo(DateProcessor.DEFAULT_TARGET_FIELD)); @@ -60,7 +58,7 @@ public class DateProcessorFactoryTests extends ESTestCase { config.put("formats", Collections.singletonList("dd/MM/yyyyy")); try { - factory.create(config); + factory.create(null, null, config); fail("processor creation should have failed"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), containsString("[field] required property is missing")); @@ -76,7 +74,7 @@ public class DateProcessorFactoryTests extends ESTestCase { config.put("target_field", targetField); try { - factory.create(config); + factory.create(null, null, config); fail("processor creation should have failed"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), containsString("[formats] required property is missing")); @@ -92,7 +90,7 @@ public class DateProcessorFactoryTests extends ESTestCase { Locale locale = randomLocale(random()); config.put("locale", locale.toLanguageTag()); - DateProcessor processor = factory.create(config); + DateProcessor processor = factory.create(null, null, config); assertThat(processor.getLocale().toLanguageTag(), equalTo(locale.toLanguageTag())); } @@ -104,7 +102,7 @@ public class DateProcessorFactoryTests extends ESTestCase { config.put("formats", Collections.singletonList("dd/MM/yyyyy")); config.put("locale", "invalid_locale"); try { - factory.create(config); + factory.create(null, null, config); fail("should fail with invalid locale"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("Invalid language tag specified: invalid_locale")); @@ -120,7 +118,7 @@ public class DateProcessorFactoryTests extends ESTestCase { DateTimeZone timezone = randomDateTimeZone(); config.put("timezone", timezone.getID()); - DateProcessor processor = factory.create(config); + DateProcessor processor = factory.create(null, null, config); assertThat(processor.getTimezone(), equalTo(timezone)); } @@ -132,7 +130,7 @@ public class DateProcessorFactoryTests extends ESTestCase { config.put("match_formats", Collections.singletonList("dd/MM/yyyyy")); config.put("timezone", "invalid_timezone"); try { - factory.create(config); + factory.create(null, null, config); fail("invalid timezone should fail"); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), equalTo("The datetime zone id 'invalid_timezone' is not recognised")); @@ -146,7 +144,7 @@ public class DateProcessorFactoryTests extends ESTestCase { config.put("field", sourceField); config.put("formats", Arrays.asList("dd/MM/yyyy", "dd-MM-yyyy")); - DateProcessor processor = factory.create(config); + DateProcessor processor = factory.create(null, null, config); assertThat(processor.getFormats(), equalTo(Arrays.asList("dd/MM/yyyy", "dd-MM-yyyy"))); } @@ -158,7 +156,7 @@ public class DateProcessorFactoryTests extends ESTestCase { config.put("formats", "dd/MM/yyyy"); try { - factory.create(config); + factory.create(null, null, config); fail("processor creation should have failed"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), containsString("[formats] property isn't a list, but of type [java.lang.String]")); @@ -174,7 +172,7 @@ public class DateProcessorFactoryTests extends ESTestCase { config.put("target_field", targetField); config.put("formats", Arrays.asList("dd/MM/yyyy", "dd-MM-yyyy")); - DateProcessor processor = factory.create(config); + DateProcessor processor = 
factory.create(null, null, config); assertThat(processor.getTargetField(), equalTo(targetField)); } } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/FailProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/FailProcessorFactoryTests.java index db16b78b316..217a15cf5b3 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/FailProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/FailProcessorFactoryTests.java @@ -19,8 +19,8 @@ package org.elasticsearch.ingest.common; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.TestTemplateService; import org.elasticsearch.test.ESTestCase; import org.junit.Before; @@ -44,8 +44,7 @@ public class FailProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("message", "error"); String processorTag = randomAsciiOfLength(10); - config.put(AbstractProcessorFactory.TAG_KEY, processorTag); - FailProcessor failProcessor = factory.create(config); + FailProcessor failProcessor = factory.create(null, processorTag, config); assertThat(failProcessor.getTag(), equalTo(processorTag)); assertThat(failProcessor.getMessage().execute(Collections.emptyMap()), equalTo("error")); } @@ -53,11 +52,20 @@ public class FailProcessorFactoryTests extends ESTestCase { public void testCreateMissingMessageField() throws Exception { Map config = new HashMap<>(); try { - factory.create(config); + factory.create(null, null, config); fail("factory create should have failed"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[message] required property is missing")); } } + public void testInvalidMustacheTemplate() throws Exception { + FailProcessor.Factory factory = new FailProcessor.Factory(TestTemplateService.instance(true)); + Map config = new HashMap<>(); + config.put("message", "error"); + String processorTag = randomAsciiOfLength(10); + ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> factory.create(null, processorTag, config)); + assertThat(exception.getMessage(), equalTo("java.lang.RuntimeException: could not compile script")); + assertThat(exception.getHeader("processor_tag").get(0), equalTo(processorTag)); + } } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorFactoryTests.java index d45e98ab06a..49611d76f40 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorFactoryTests.java @@ -19,11 +19,9 @@ package org.elasticsearch.ingest.common; -import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ingest.Processor; -import org.elasticsearch.ingest.ProcessorsRegistry; import org.elasticsearch.ingest.TestProcessor; -import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; @@ -31,43 +29,69 @@ import java.util.Collections; import java.util.HashMap; import java.util.Map; -import static org.mockito.Mockito.mock; +import static org.hamcrest.Matchers.equalTo; public class 
ForEachProcessorFactoryTests extends ESTestCase { public void testCreate() throws Exception { - ProcessorsRegistry.Builder builder = new ProcessorsRegistry.Builder(); - Processor processor = new TestProcessor(ingestDocument -> {}); - builder.registerProcessor("_name", (registry) -> config -> processor); - ProcessorsRegistry registry = builder.build(mock(ScriptService.class), mock(ClusterService.class)); - ForEachProcessor.Factory forEachFactory = new ForEachProcessor.Factory(registry); + Processor processor = new TestProcessor(ingestDocument -> { }); + Map registry = new HashMap<>(); + registry.put("_name", (r, t, c) -> processor); + ForEachProcessor.Factory forEachFactory = new ForEachProcessor.Factory(); Map config = new HashMap<>(); config.put("field", "_field"); - config.put("processors", Collections.singletonList(Collections.singletonMap("_name", Collections.emptyMap()))); - ForEachProcessor forEachProcessor = forEachFactory.create(config); + config.put("processor", Collections.singletonMap("_name", Collections.emptyMap())); + ForEachProcessor forEachProcessor = forEachFactory.create(registry, null, config); assertThat(forEachProcessor, Matchers.notNullValue()); - assertThat(forEachProcessor.getField(), Matchers.equalTo("_field")); - assertThat(forEachProcessor.getProcessors().size(), Matchers.equalTo(1)); - assertThat(forEachProcessor.getProcessors().get(0), Matchers.sameInstance(processor)); + assertThat(forEachProcessor.getField(), equalTo("_field")); + assertThat(forEachProcessor.getProcessor(), Matchers.sameInstance(processor)); + } - config = new HashMap<>(); - config.put("processors", Collections.singletonList(Collections.singletonMap("_name", Collections.emptyMap()))); - try { - forEachFactory.create(config); - fail("exception expected"); - } catch (Exception e) { - assertThat(e.getMessage(), Matchers.equalTo("[field] required property is missing")); - } + public void testCreateWithTooManyProcessorTypes() throws Exception { + Processor processor = new TestProcessor(ingestDocument -> { }); + Map registry = new HashMap<>(); + registry.put("_first", (r, t, c) -> processor); + registry.put("_second", (r, t, c) -> processor); + ForEachProcessor.Factory forEachFactory = new ForEachProcessor.Factory(); - config = new HashMap<>(); + Map config = new HashMap<>(); config.put("field", "_field"); - try { - forEachFactory.create(config); - fail("exception expected"); - } catch (Exception e) { - assertThat(e.getMessage(), Matchers.equalTo("[processors] required property is missing")); - } + Map processorTypes = new HashMap<>(); + processorTypes.put("_first", Collections.emptyMap()); + processorTypes.put("_second", Collections.emptyMap()); + config.put("processor", processorTypes); + Exception exception = expectThrows(ElasticsearchParseException.class, () -> forEachFactory.create(registry, null, config)); + assertThat(exception.getMessage(), equalTo("[processor] Must specify exactly one processor type")); + } + + public void testCreateWithNonExistingProcessorType() throws Exception { + ForEachProcessor.Factory forEachFactory = new ForEachProcessor.Factory(); + Map config = new HashMap<>(); + config.put("field", "_field"); + config.put("processor", Collections.singletonMap("_name", Collections.emptyMap())); + Exception expectedException = expectThrows(ElasticsearchParseException.class, + () -> forEachFactory.create(Collections.emptyMap(), null, config)); + assertThat(expectedException.getMessage(), equalTo("No processor type exists with name [_name]")); + } + + public void 
testCreateWithMissingField() throws Exception { + Processor processor = new TestProcessor(ingestDocument -> { }); + Map registry = new HashMap<>(); + registry.put("_name", (r, t, c) -> processor); + ForEachProcessor.Factory forEachFactory = new ForEachProcessor.Factory(); + Map config = new HashMap<>(); + config.put("processor", Collections.singletonList(Collections.singletonMap("_name", Collections.emptyMap()))); + Exception exception = expectThrows(Exception.class, () -> forEachFactory.create(registry, null, config)); + assertThat(exception.getMessage(), equalTo("[field] required property is missing")); + } + + public void testCreateWithMissingProcessor() { + ForEachProcessor.Factory forEachFactory = new ForEachProcessor.Factory(); + Map config = new HashMap<>(); + config.put("field", "_field"); + Exception exception = expectThrows(Exception.class, () -> forEachFactory.create(Collections.emptyMap(), null, config)); + assertThat(exception.getMessage(), equalTo("[processor] required property is missing")); } } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java index 05287935b49..714722418e7 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java @@ -25,7 +25,6 @@ import org.elasticsearch.ingest.Processor; import org.elasticsearch.ingest.TemplateService; import org.elasticsearch.ingest.TestProcessor; import org.elasticsearch.ingest.TestTemplateService; -import org.elasticsearch.ingest.ValueSource; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; @@ -50,7 +49,7 @@ public class ForEachProcessorTests extends ESTestCase { ); ForEachProcessor processor = new ForEachProcessor( - "_tag", "values", Collections.singletonList(new UppercaseProcessor("_tag", "_value")) + "_tag", "values", new UppercaseProcessor("_tag", "_value") ); processor.execute(ingestDocument); @@ -70,7 +69,7 @@ public class ForEachProcessorTests extends ESTestCase { throw new RuntimeException("failure"); } }); - ForEachProcessor processor = new ForEachProcessor("_tag", "values", Collections.singletonList(testProcessor)); + ForEachProcessor processor = new ForEachProcessor("_tag", "values", testProcessor); try { processor.execute(ingestDocument); fail("exception expected"); @@ -90,8 +89,7 @@ public class ForEachProcessorTests extends ESTestCase { }); Processor onFailureProcessor = new TestProcessor(ingestDocument1 -> {}); processor = new ForEachProcessor( - "_tag", "values", - Collections.singletonList(new CompoundProcessor(false, Arrays.asList(testProcessor), Arrays.asList(onFailureProcessor))) + "_tag", "values", new CompoundProcessor(false, Arrays.asList(testProcessor), Arrays.asList(onFailureProcessor)) ); processor.execute(ingestDocument); assertThat(testProcessor.getInvokedCounter(), equalTo(3)); @@ -111,7 +109,7 @@ public class ForEachProcessorTests extends ESTestCase { id.setFieldValue("_value.type", id.getSourceAndMetadata().get("_type")); id.setFieldValue("_value.id", id.getSourceAndMetadata().get("_id")); }); - ForEachProcessor processor = new ForEachProcessor("_tag", "values", Collections.singletonList(innerProcessor)); + ForEachProcessor processor = new ForEachProcessor("_tag", "values", innerProcessor); processor.execute(ingestDocument); assertThat(innerProcessor.getInvokedCounter(), 
equalTo(2)); @@ -138,9 +136,7 @@ public class ForEachProcessorTests extends ESTestCase { TemplateService ts = TestTemplateService.instance(); ForEachProcessor processor = new ForEachProcessor( - "_tag", "values", Arrays.asList( - new AppendProcessor("_tag", ts.compile("flat_values"), ValueSource.wrap("value", ts)), - new SetProcessor("_tag", ts.compile("_value.new_field"), (model) -> model.get("other"))) + "_tag", "values", new SetProcessor("_tag", ts.compile("_value.new_field"), (model) -> model.get("other")) ); processor.execute(ingestDocument); @@ -149,21 +145,10 @@ public class ForEachProcessorTests extends ESTestCase { assertThat(ingestDocument.getFieldValue("values.2.new_field", String.class), equalTo("value")); assertThat(ingestDocument.getFieldValue("values.3.new_field", String.class), equalTo("value")); assertThat(ingestDocument.getFieldValue("values.4.new_field", String.class), equalTo("value")); - - List flatValues = ingestDocument.getFieldValue("flat_values", List.class); - assertThat(flatValues.size(), equalTo(5)); - assertThat(flatValues.get(0), equalTo("value")); - assertThat(flatValues.get(1), equalTo("value")); - assertThat(flatValues.get(2), equalTo("value")); - assertThat(flatValues.get(3), equalTo("value")); - assertThat(flatValues.get(4), equalTo("value")); } public void testRandom() throws Exception { - int numProcessors = randomInt(8); - List processors = new ArrayList<>(numProcessors); - for (int i = 0; i < numProcessors; i++) { - processors.add(new Processor() { + Processor innerProcessor = new Processor() { @Override public void execute(IngestDocument ingestDocument) throws Exception { String existingValue = ingestDocument.getFieldValue("_value", String.class); @@ -179,8 +164,7 @@ public class ForEachProcessorTests extends ESTestCase { public String getTag() { return null; } - }); - } + }; int numValues = randomIntBetween(1, 32); List values = new ArrayList<>(numValues); for (int i = 0; i < numValues; i++) { @@ -190,18 +174,13 @@ public class ForEachProcessorTests extends ESTestCase { "_index", "_type", "_id", null, null, null, null, Collections.singletonMap("values", values) ); - ForEachProcessor processor = new ForEachProcessor("_tag", "values", processors); + ForEachProcessor processor = new ForEachProcessor("_tag", "values", innerProcessor); processor.execute(ingestDocument); List result = ingestDocument.getFieldValue("values", List.class); assertThat(result.size(), equalTo(numValues)); - String expectedString = ""; - for (int i = 0; i < numProcessors; i++) { - expectedString = expectedString + "."; - } - for (String r : result) { - assertThat(r, equalTo(expectedString)); + assertThat(r, equalTo(".")); } } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorFactoryTests.java index a7a133b4363..20537d2ced6 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorFactoryTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.test.ESTestCase; import java.util.Collections; @@ -39,8 +38,7 @@ public class GrokProcessorFactoryTests extends ESTestCase { config.put("field", "_field"); config.put("patterns", 
Collections.singletonList("(?<foo>\\w+)"));
         String processorTag = randomAsciiOfLength(10);
-        config.put(AbstractProcessorFactory.TAG_KEY, processorTag);
-        GrokProcessor processor = factory.create(config);
+        GrokProcessor processor = factory.create(null, processorTag, config);
         assertThat(processor.getTag(), equalTo(processorTag));
         assertThat(processor.getMatchField(), equalTo("_field"));
         assertThat(processor.getGrok(), notNullValue());
@@ -50,7 +48,7 @@
         GrokProcessor.Factory factory = new GrokProcessor.Factory(Collections.emptyMap());
         Map<String, Object> config = new HashMap<>();
         config.put("patterns", Collections.singletonList("(?<foo>\\w+)"));
-        ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(config));
+        ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, config));
         assertThat(e.getMessage(), equalTo("[field] required property is missing"));
     }
@@ -58,7 +56,7 @@
         GrokProcessor.Factory factory = new GrokProcessor.Factory(Collections.emptyMap());
         Map<String, Object> config = new HashMap<>();
         config.put("field", "foo");
-        ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(config));
+        ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, config));
         assertThat(e.getMessage(), equalTo("[patterns] required property is missing"));
     }
@@ -67,7 +65,7 @@
         Map<String, Object> config = new HashMap<>();
         config.put("field", "foo");
         config.put("patterns", Collections.emptyList());
-        ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(config));
+        ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, config));
         assertThat(e.getMessage(), equalTo("[patterns] List of patterns must not be empty"));
     }
@@ -78,7 +76,7 @@
         config.put("field", "_field");
         config.put("patterns", Collections.singletonList("%{MY_PATTERN:name}!"));
         config.put("pattern_definitions", Collections.singletonMap("MY_PATTERN", "foo"));
-        GrokProcessor processor = factory.create(config);
+        GrokProcessor processor = factory.create(null, null, config);
         assertThat(processor.getMatchField(), equalTo("_field"));
         assertThat(processor.getGrok(), notNullValue());
         assertThat(processor.getGrok().match("foo!"), equalTo(true));
@@ -89,7 +87,7 @@
         Map<String, Object> config = new HashMap<>();
         config.put("field", "_field");
         config.put("patterns", Collections.singletonList("["));
-        ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(config));
+        ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, config));
         assertThat(e.getMessage(), equalTo("[patterns] Invalid regex pattern found in: [[]. 
premature end of char-class")); } @@ -99,7 +97,7 @@ public class GrokProcessorFactoryTests extends ESTestCase { config.put("field", "_field"); config.put("patterns", Collections.singletonList("%{MY_PATTERN:name}!")); config.put("pattern_definitions", Collections.singletonMap("MY_PATTERN", "[")); - ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(config)); + ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, config)); assertThat(e.getMessage(), equalTo("[patterns] Invalid regex pattern found in: [%{MY_PATTERN:name}!]. premature end of char-class")); } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GsubProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GsubProcessorFactoryTests.java index 60cceb34024..51e246c67e9 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GsubProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GsubProcessorFactoryTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.test.ESTestCase; import java.util.HashMap; @@ -38,8 +37,7 @@ public class GsubProcessorFactoryTests extends ESTestCase { config.put("pattern", "\\."); config.put("replacement", "-"); String processorTag = randomAsciiOfLength(10); - config.put(AbstractProcessorFactory.TAG_KEY, processorTag); - GsubProcessor gsubProcessor = factory.create(config); + GsubProcessor gsubProcessor = factory.create(null, processorTag, config); assertThat(gsubProcessor.getTag(), equalTo(processorTag)); assertThat(gsubProcessor.getField(), equalTo("field1")); assertThat(gsubProcessor.getPattern().toString(), equalTo("\\.")); @@ -52,7 +50,7 @@ public class GsubProcessorFactoryTests extends ESTestCase { config.put("pattern", "\\."); config.put("replacement", "-"); try { - factory.create(config); + factory.create(null, null, config); fail("factory create should have failed"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[field] required property is missing")); @@ -65,7 +63,7 @@ public class GsubProcessorFactoryTests extends ESTestCase { config.put("field", "field1"); config.put("replacement", "-"); try { - factory.create(config); + factory.create(null, null, config); fail("factory create should have failed"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[pattern] required property is missing")); @@ -78,7 +76,7 @@ public class GsubProcessorFactoryTests extends ESTestCase { config.put("field", "field1"); config.put("pattern", "\\."); try { - factory.create(config); + factory.create(null, null, config); fail("factory create should have failed"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[replacement] required property is missing")); @@ -92,7 +90,7 @@ public class GsubProcessorFactoryTests extends ESTestCase { config.put("pattern", "["); config.put("replacement", "-"); try { - factory.create(config); + factory.create(null, null, config); fail("factory create should have failed"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), containsString("[pattern] Invalid regex pattern. 
Unclosed character class")); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JoinProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JoinProcessorFactoryTests.java index 970fd8b8b9a..68b2daecb4a 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JoinProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JoinProcessorFactoryTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.test.ESTestCase; import java.util.HashMap; @@ -36,8 +35,7 @@ public class JoinProcessorFactoryTests extends ESTestCase { config.put("field", "field1"); config.put("separator", "-"); String processorTag = randomAsciiOfLength(10); - config.put(AbstractProcessorFactory.TAG_KEY, processorTag); - JoinProcessor joinProcessor = factory.create(config); + JoinProcessor joinProcessor = factory.create(null, processorTag, config); assertThat(joinProcessor.getTag(), equalTo(processorTag)); assertThat(joinProcessor.getField(), equalTo("field1")); assertThat(joinProcessor.getSeparator(), equalTo("-")); @@ -48,7 +46,7 @@ public class JoinProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("separator", "-"); try { - factory.create(config); + factory.create(null, null, config); fail("factory create should have failed"); } catch (ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[field] required property is missing")); @@ -60,7 +58,7 @@ public class JoinProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "field1"); try { - factory.create(config); + factory.create(null, null, config); fail("factory create should have failed"); } catch (ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[separator] required property is missing")); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/LowercaseProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/LowercaseProcessorFactoryTests.java index 4dec115458c..4d98efc4bb0 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/LowercaseProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/LowercaseProcessorFactoryTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.test.ESTestCase; import java.util.HashMap; @@ -35,8 +34,7 @@ public class LowercaseProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "field1"); String processorTag = randomAsciiOfLength(10); - config.put(AbstractProcessorFactory.TAG_KEY, processorTag); - LowercaseProcessor uppercaseProcessor = factory.create(config); + LowercaseProcessor uppercaseProcessor = (LowercaseProcessor)factory.create(null, processorTag, config); assertThat(uppercaseProcessor.getTag(), equalTo(processorTag)); assertThat(uppercaseProcessor.getField(), equalTo("field1")); } @@ -45,7 +43,7 @@ public class LowercaseProcessorFactoryTests extends ESTestCase { LowercaseProcessor.Factory factory = new LowercaseProcessor.Factory(); Map config = new HashMap<>(); try { - factory.create(config); + factory.create(null, null, config); 
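A note on the two assertion styles visible in these test hunks: the older tests, like the Lowercase one here, keep the try/fail/catch idiom around factory.create(...), while the Grok and DateIndexName tests in this same diff already use ESTestCase.expectThrows. The two forms are equivalent; any of the try/catch blocks could be reduced to:

    ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class,
            () -> factory.create(null, null, config));
    assertThat(e.getMessage(), equalTo("[field] required property is missing"));

(The expected message varies per test; this one matches the missing-field case shown above.)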
fail("factory create should have failed"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[field] required property is missing")); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RemoveProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RemoveProcessorFactoryTests.java index a5f88103e96..71e878744d5 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RemoveProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RemoveProcessorFactoryTests.java @@ -19,8 +19,8 @@ package org.elasticsearch.ingest.common; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.TestTemplateService; import org.elasticsearch.test.ESTestCase; import org.junit.Before; @@ -44,8 +44,7 @@ public class RemoveProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "field1"); String processorTag = randomAsciiOfLength(10); - config.put(AbstractProcessorFactory.TAG_KEY, processorTag); - RemoveProcessor removeProcessor = factory.create(config); + RemoveProcessor removeProcessor = factory.create(null, processorTag, config); assertThat(removeProcessor.getTag(), equalTo(processorTag)); assertThat(removeProcessor.getField().execute(Collections.emptyMap()), equalTo("field1")); } @@ -53,11 +52,20 @@ public class RemoveProcessorFactoryTests extends ESTestCase { public void testCreateMissingField() throws Exception { Map config = new HashMap<>(); try { - factory.create(config); + factory.create(null, null, config); fail("factory create should have failed"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[field] required property is missing")); } } + public void testInvalidMustacheTemplate() throws Exception { + RemoveProcessor.Factory factory = new RemoveProcessor.Factory(TestTemplateService.instance(true)); + Map config = new HashMap<>(); + config.put("field", "field1"); + String processorTag = randomAsciiOfLength(10); + ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> factory.create(null, processorTag, config)); + assertThat(exception.getMessage(), equalTo("java.lang.RuntimeException: could not compile script")); + assertThat(exception.getHeader("processor_tag").get(0), equalTo(processorTag)); + } } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorFactoryTests.java index c078f09dd92..68f28cb30d7 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorFactoryTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.test.ESTestCase; import java.util.HashMap; @@ -36,8 +35,7 @@ public class RenameProcessorFactoryTests extends ESTestCase { config.put("field", "old_field"); config.put("target_field", "new_field"); String processorTag = randomAsciiOfLength(10); - config.put(AbstractProcessorFactory.TAG_KEY, processorTag); - RenameProcessor renameProcessor = factory.create(config); + 
RenameProcessor renameProcessor = factory.create(null, processorTag, config); assertThat(renameProcessor.getTag(), equalTo(processorTag)); assertThat(renameProcessor.getField(), equalTo("old_field")); assertThat(renameProcessor.getTargetField(), equalTo("new_field")); @@ -48,7 +46,7 @@ public class RenameProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("target_field", "new_field"); try { - factory.create(config); + factory.create(null, null, config); fail("factory create should have failed"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[field] required property is missing")); @@ -60,7 +58,7 @@ public class RenameProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "old_field"); try { - factory.create(config); + factory.create(null, null, config); fail("factory create should have failed"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[target_field] required property is missing")); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorFactoryTests.java index ed47894d4d9..806aacd9e23 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorFactoryTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; import org.junit.Before; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -55,8 +56,7 @@ public class ScriptProcessorFactoryTests extends ESTestCase { configMap.put("lang", "mockscript"); ElasticsearchException exception = expectThrows(ElasticsearchException.class, - () -> factory.doCreate(randomAsciiOfLength(10), configMap)); - + () -> factory.create(null, randomAsciiOfLength(10), configMap)); assertThat(exception.getMessage(), is("[null] Only one of [file], [id], or [inline] may be configured")); } @@ -66,7 +66,7 @@ public class ScriptProcessorFactoryTests extends ESTestCase { configMap.put("lang", "mockscript"); ElasticsearchException exception = expectThrows(ElasticsearchException.class, - () -> factory.doCreate(randomAsciiOfLength(10), configMap)); + () -> factory.create(null, randomAsciiOfLength(10), configMap)); assertThat(exception.getMessage(), is("[null] Need [file], [id], or [inline] parameter to refer to scripts")); } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java index 5fe7db77dd8..24ca1cf39a3 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java @@ -42,7 +42,7 @@ public class ScriptProcessorTests extends ESTestCase { int randomInt = randomInt(); ScriptService scriptService = mock(ScriptService.class); CompiledScript compiledScript = mock(CompiledScript.class); - Script script = mock(Script.class); + Script script = new Script("_script"); when(scriptService.compile(any(), any(), any())).thenReturn(compiledScript); ExecutableScript executableScript = mock(ExecutableScript.class); when(scriptService.executable(any(), any())).thenReturn(executableScript); diff --git 
a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SetProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SetProcessorFactoryTests.java index b8c97a379cb..45f144e3305 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SetProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SetProcessorFactoryTests.java @@ -19,8 +19,8 @@ package org.elasticsearch.ingest.common; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.TestTemplateService; import org.elasticsearch.test.ESTestCase; import org.junit.Before; @@ -45,8 +45,7 @@ public class SetProcessorFactoryTests extends ESTestCase { config.put("field", "field1"); config.put("value", "value1"); String processorTag = randomAsciiOfLength(10); - config.put(AbstractProcessorFactory.TAG_KEY, processorTag); - SetProcessor setProcessor = factory.create(config); + SetProcessor setProcessor = factory.create(null, processorTag, config); assertThat(setProcessor.getTag(), equalTo(processorTag)); assertThat(setProcessor.getField().execute(Collections.emptyMap()), equalTo("field1")); assertThat(setProcessor.getValue().copyAndResolve(Collections.emptyMap()), equalTo("value1")); @@ -60,8 +59,7 @@ public class SetProcessorFactoryTests extends ESTestCase { config.put("value", "value1"); config.put("override", overrideEnabled); String processorTag = randomAsciiOfLength(10); - config.put(AbstractProcessorFactory.TAG_KEY, processorTag); - SetProcessor setProcessor = factory.create(config); + SetProcessor setProcessor = factory.create(null, processorTag, config); assertThat(setProcessor.getTag(), equalTo(processorTag)); assertThat(setProcessor.getField().execute(Collections.emptyMap()), equalTo("field1")); assertThat(setProcessor.getValue().copyAndResolve(Collections.emptyMap()), equalTo("value1")); @@ -72,7 +70,7 @@ public class SetProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("value", "value1"); try { - factory.create(config); + factory.create(null, null, config); fail("factory create should have failed"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[field] required property is missing")); @@ -83,7 +81,7 @@ public class SetProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "field1"); try { - factory.create(config); + factory.create(null, null, config); fail("factory create should have failed"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[value] required property is missing")); @@ -95,11 +93,22 @@ public class SetProcessorFactoryTests extends ESTestCase { config.put("field", "field1"); config.put("value", null); try { - factory.create(config); + factory.create(null, null, config); fail("factory create should have failed"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[value] required property is missing")); } } + public void testInvalidMustacheTemplate() throws Exception { + SetProcessor.Factory factory = new SetProcessor.Factory(TestTemplateService.instance(true)); + Map config = new HashMap<>(); + config.put("field", "field1"); + config.put("value", "value1"); + String processorTag = randomAsciiOfLength(10); + ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> factory.create(null, 
processorTag, config)); + assertThat(exception.getMessage(), equalTo("java.lang.RuntimeException: could not compile script")); + assertThat(exception.getHeader("processor_tag").get(0), equalTo(processorTag)); + } + } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SplitProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SplitProcessorFactoryTests.java index c747807b710..bef9a84a1e2 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SplitProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SplitProcessorFactoryTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.test.ESTestCase; import java.util.HashMap; @@ -36,8 +35,7 @@ public class SplitProcessorFactoryTests extends ESTestCase { config.put("field", "field1"); config.put("separator", "\\."); String processorTag = randomAsciiOfLength(10); - config.put(AbstractProcessorFactory.TAG_KEY, processorTag); - SplitProcessor splitProcessor = factory.create(config); + SplitProcessor splitProcessor = factory.create(null, processorTag, config); assertThat(splitProcessor.getTag(), equalTo(processorTag)); assertThat(splitProcessor.getField(), equalTo("field1")); assertThat(splitProcessor.getSeparator(), equalTo("\\.")); @@ -48,7 +46,7 @@ public class SplitProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("separator", "\\."); try { - factory.create(config); + factory.create(null, null, config); fail("factory create should have failed"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[field] required property is missing")); @@ -60,7 +58,7 @@ public class SplitProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "field1"); try { - factory.create(config); + factory.create(null, null, config); fail("factory create should have failed"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[separator] required property is missing")); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SplitProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SplitProcessorTests.java index 13d45dc126b..edb3639679c 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SplitProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SplitProcessorTests.java @@ -84,7 +84,7 @@ public class SplitProcessorTests extends ESTestCase { Map splitConfig = new HashMap<>(); splitConfig.put("field", "flags"); splitConfig.put("separator", "\\|"); - Processor splitProcessor = (new SplitProcessor.Factory()).create(splitConfig); + Processor splitProcessor = (new SplitProcessor.Factory()).create(null, null, splitConfig); Map source = new HashMap<>(); source.put("flags", "new|hot|super|fun|interesting"); IngestDocument ingestDocument = new IngestDocument(source, new HashMap<>()); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/TrimProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/TrimProcessorFactoryTests.java index 54904775478..1bd57c79aca 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/TrimProcessorFactoryTests.java +++ 
b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/TrimProcessorFactoryTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.test.ESTestCase; import java.util.HashMap; @@ -35,8 +34,7 @@ public class TrimProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "field1"); String processorTag = randomAsciiOfLength(10); - config.put(AbstractProcessorFactory.TAG_KEY, processorTag); - TrimProcessor uppercaseProcessor = factory.create(config); + TrimProcessor uppercaseProcessor = (TrimProcessor)factory.create(null, processorTag, config); assertThat(uppercaseProcessor.getTag(), equalTo(processorTag)); assertThat(uppercaseProcessor.getField(), equalTo("field1")); } @@ -45,7 +43,7 @@ public class TrimProcessorFactoryTests extends ESTestCase { TrimProcessor.Factory factory = new TrimProcessor.Factory(); Map config = new HashMap<>(); try { - factory.create(config); + factory.create(null, null, config); fail("factory create should have failed"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[field] required property is missing")); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/UppercaseProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/UppercaseProcessorFactoryTests.java index cd4d1faf767..0f834119510 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/UppercaseProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/UppercaseProcessorFactoryTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.test.ESTestCase; import java.util.HashMap; @@ -35,8 +34,7 @@ public class UppercaseProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "field1"); String processorTag = randomAsciiOfLength(10); - config.put(AbstractProcessorFactory.TAG_KEY, processorTag); - UppercaseProcessor uppercaseProcessor = factory.create(config); + UppercaseProcessor uppercaseProcessor = (UppercaseProcessor)factory.create(null, processorTag, config); assertThat(uppercaseProcessor.getTag(), equalTo(processorTag)); assertThat(uppercaseProcessor.getField(), equalTo("field1")); } @@ -45,7 +43,7 @@ public class UppercaseProcessorFactoryTests extends ESTestCase { UppercaseProcessor.Factory factory = new UppercaseProcessor.Factory(); Map config = new HashMap<>(); try { - factory.create(config); + factory.create(null, null, config); fail("factory create should have failed"); } catch(ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[field] required property is missing")); diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/80_foreach.yaml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/80_foreach.yaml index fc0fca81b09..2ebfc089396 100644 --- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/80_foreach.yaml +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/80_foreach.yaml @@ -10,13 +10,11 @@ { "foreach" : { "field" : "values", - "processors" : [ - { + "processor" : { "uppercase" : { "field" : "_value" } - } - ] + } } } ] diff --git 
a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/90_simulate.yaml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/90_simulate.yaml index 4dfe8a31ee8..7ce30ee3bde 100644 --- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/90_simulate.yaml +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/90_simulate.yaml @@ -481,7 +481,7 @@ - match: { docs.0.processor_results.4.doc._source.status: 200 } --- -"Test verbose simulate with ignore_failure": +"Test verbose simulate with ignore_failure and thrown exception": - do: ingest.simulate: verbose: true @@ -547,5 +547,54 @@ - match: { docs.0.processor_results.0.doc._source.field1: "123.42 400 " } - match: { docs.0.processor_results.0.doc._source.status: 200 } - match: { docs.0.processor_results.1.tag: "rename-1" } + - match: { docs.0.processor_results.1.ignored_error.error.type: "illegal_argument_exception" } + - match: { docs.0.processor_results.1.ignored_error.error.reason: "field [foofield] doesn't exist" } - match: { docs.0.processor_results.1.doc._source.field1: "123.42 400 " } - match: { docs.0.processor_results.1.doc._source.status: 200 } + +--- +"Test verbose simulate with ignore_failure and no exception thrown": + - do: + ingest.simulate: + verbose: true + body: > + { + "pipeline" : { + "description": "_description", + "processors": [ + { + "set" : { + "tag" : "setstatus-1", + "field" : "status", + "value" : 200 + } + }, + { + "rename" : { + "tag" : "rename-1", + "field" : "status", + "target_field" : "new_status", + "ignore_failure": true + } + } + ] + }, + "docs": [ + { + "_index": "index", + "_type": "type", + "_id": "id", + "_source": { + "field1": "123.42 400 " + } + } + ] + } + - length: { docs: 1 } + - length: { docs.0.processor_results: 2 } + - match: { docs.0.processor_results.0.tag: "setstatus-1" } + - match: { docs.0.processor_results.0.doc._source.field1: "123.42 400 " } + - match: { docs.0.processor_results.0.doc._source.status: 200 } + - length: { docs.0.processor_results.1: 2 } + - match: { docs.0.processor_results.1.tag: "rename-1" } + - match: { docs.0.processor_results.1.doc._source.new_status: 200 } diff --git a/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java b/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java index 105d42c8c86..a83dd93a17e 100644 --- a/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java +++ b/modules/lang-groovy/src/main/java/org/elasticsearch/script/groovy/GroovyScriptEngineService.java @@ -155,7 +155,7 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri GroovyClassLoader groovyClassLoader = new GroovyClassLoader(loader, configuration); return groovyClassLoader.parseClass(codeSource); - } catch (Throwable e) { + } catch (Exception e) { if (logger.isTraceEnabled()) { logger.trace("Exception compiling Groovy script:", e); } @@ -293,7 +293,7 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri // NOTE: we truncate the stack because IndyInterface has security issue (needs getClassLoader) // we don't do a security check just as a tradeoff, it cannot really escalate to anything. 
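An aside on the two `catch (Throwable e)` → `catch (Exception e)` hunks in GroovyScriptEngineService here: narrowing the catch lets JVM `Error`s (OutOfMemoryError, StackOverflowError) escape instead of being trace-logged and wrapped. A minimal sketch of the compile-side pattern, with hypothetical helper names rather than the module's actual code:

```java
import groovy.lang.GroovyClassLoader;
import groovy.lang.GroovyCodeSource;

final class GroovyCompileSketch {
    // Recoverable failures (Exception) are wrapped and rethrown as a script
    // problem; JVM Errors deliberately fall through and abort the operation.
    static Class<?> compileOrFail(GroovyClassLoader loader, GroovyCodeSource source) {
        try {
            return loader.parseClass(source);
        } catch (Exception e) { // was: catch (Throwable e)
            throw new IllegalStateException("failed to compile Groovy script", e);
        }
    }
}
```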
return AccessController.doPrivileged((PrivilegedAction) script::run); - } catch (Throwable e) { + } catch (Exception e) { if (logger.isTraceEnabled()) { logger.trace("failed to run {}", e, compiledScript); } diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BulkTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BulkTests.java index 13868566eac..cda5ba2161e 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BulkTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BulkTests.java @@ -455,16 +455,21 @@ public class BulkTests extends ESIntegTestCase { byte[] addParent = new BytesArray("{\"index\" : { \"_index\" : \"test\", \"_type\" : \"parent\", \"_id\" : \"parent1\"}}\n" + "{\"field1\" : \"value1\"}\n").array(); - byte[] addChild = new BytesArray("{\"update\" : { \"_id\" : \"child1\", \"_type\" : \"child\", \"_index\" : \"test\", \"parent\" : \"parent1\"} }\n" + + byte[] addChild1 = new BytesArray("{\"update\" : { \"_id\" : \"child1\", \"_type\" : \"child\", \"_index\" : \"test\", \"parent\" : \"parent1\"} }\n" + + "{ \"script\" : {\"inline\" : \"ctx._source.field2 = 'value2'\"}, \"upsert\" : {\"field1\" : \"value1\"}}\n").array(); + + byte[] addChild2 = new BytesArray("{\"update\" : { \"_id\" : \"child1\", \"_type\" : \"child\", \"_index\" : \"test\", \"parent\" : \"parent1\"} }\n" + "{ \"script\" : \"ctx._source.field2 = 'value2'\", \"upsert\" : {\"field1\" : \"value1\"}}\n").array(); builder.add(addParent, 0, addParent.length); - builder.add(addChild, 0, addChild.length); + builder.add(addChild1, 0, addChild1.length); + builder.add(addChild2, 0, addChild2.length); BulkResponse bulkResponse = builder.get(); - assertThat(bulkResponse.getItems().length, equalTo(2)); + assertThat(bulkResponse.getItems().length, equalTo(3)); assertThat(bulkResponse.getItems()[0].isFailed(), equalTo(false)); assertThat(bulkResponse.getItems()[1].isFailed(), equalTo(false)); + assertThat(bulkResponse.getItems()[2].isFailed(), equalTo(false)); client().admin().indices().prepareRefresh("test").get(); @@ -616,7 +621,6 @@ public class BulkTests extends ESIntegTestCase { // issue 6410 public void testThatMissingIndexDoesNotAbortFullBulkRequest() throws Exception{ createIndex("bulkindex1", "bulkindex2"); - ensureYellow(); BulkRequest bulkRequest = new BulkRequest(); bulkRequest.add(new IndexRequest("bulkindex1", "index1_type", "1").source("text", "hallo1")) .add(new IndexRequest("bulkindex2", "index2_type", "1").source("text", "hallo2")) @@ -639,7 +643,6 @@ public class BulkTests extends ESIntegTestCase { // issue 9821 public void testFailedRequestsOnClosedIndex() throws Exception { createIndex("bulkindex1"); - ensureYellow(); client().prepareIndex("bulkindex1", "index1_type", "1").setSource("text", "test").get(); assertAcked(client().admin().indices().prepareClose("bulkindex1")); diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DoubleTermsTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DoubleTermsTests.java index 6ed77fffcc8..adbc66c2202 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DoubleTermsTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DoubleTermsTests.java @@ -48,6 +48,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; +import java.util.Locale; import java.util.Map; import static 
org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; @@ -624,6 +625,32 @@ public class DoubleTermsTests extends AbstractTermsTestCase { } } + public void testPartiallyUnmappedWithFormat() throws Exception { + SearchResponse response = client().prepareSearch("idx_unmapped", "idx").setTypes("type") + .addAggregation(terms("terms") + .field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .format("0000.00")) + .execute().actionGet(); + + assertSearchResponse(response); + + + Terms terms = response.getAggregations().get("terms"); + assertThat(terms, notNullValue()); + assertThat(terms.getName(), equalTo("terms")); + assertThat(terms.getBuckets().size(), equalTo(5)); + + for (int i = 0; i < 5; i++) { + String key = String.format(Locale.ROOT, "%07.2f", (double) i); + Terms.Bucket bucket = terms.getBucketByKey(key); + assertThat(bucket, notNullValue()); + assertThat(key(bucket), equalTo(key)); + assertThat(bucket.getKeyAsNumber().intValue(), equalTo(i)); + assertThat(bucket.getDocCount(), equalTo(1L)); + } + } + public void testEmptyAggregation() throws Exception { SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") .setQuery(matchAllQuery()) diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/FunctionScoreTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/FunctionScoreTests.java index cfee7388d09..0f9279da8f2 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/FunctionScoreTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/FunctionScoreTests.java @@ -61,7 +61,6 @@ public class FunctionScoreTests extends ESIntegTestCase { public void testScriptScoresNested() throws IOException { createIndex(INDEX); - ensureYellow(); index(INDEX, TYPE, "1", jsonBuilder().startObject().field("dummy_field", 1).endObject()); refresh(); SearchResponse response = client().search( @@ -82,7 +81,6 @@ public class FunctionScoreTests extends ESIntegTestCase { public void testScriptScoresWithAgg() throws IOException { createIndex(INDEX); - ensureYellow(); index(INDEX, TYPE, "1", jsonBuilder().startObject().field("dummy_field", 1).endObject()); refresh(); SearchResponse response = client().search( @@ -100,7 +98,6 @@ public class FunctionScoreTests extends ESIntegTestCase { public void testMinScoreFunctionScoreBasic() throws IOException { index(INDEX, TYPE, jsonBuilder().startObject().field("num", 2).endObject()); refresh(); - ensureYellow(); float score = randomFloat(); float minScore = randomFloat(); SearchResponse searchResponse = client().search( @@ -136,7 +133,6 @@ public class FunctionScoreTests extends ESIntegTestCase { docs.add(client().prepareIndex(INDEX, TYPE, Integer.toString(i)).setSource("num", i + scoreOffset)); } indexRandom(true, docs); - ensureYellow(); Script script = new Script("return (doc['num'].value)"); int numMatchingDocs = numDocs + scoreOffset - minScore; if (numMatchingDocs < 0) { @@ -172,7 +168,6 @@ public class FunctionScoreTests extends ESIntegTestCase { /** make sure min_score works if functions is empty, see https://github.com/elastic/elasticsearch/issues/10253 */ public void testWithEmptyFunctions() throws IOException, ExecutionException, InterruptedException { assertAcked(prepareCreate("test")); - ensureYellow(); index("test", "testtype", "1", jsonBuilder().startObject().field("text", "test text").endObject()); refresh(); diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoDistanceTests.java 
b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoDistanceTests.java index e9c4bf6e359..0d7dd4e12e2 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoDistanceTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoDistanceTests.java @@ -72,28 +72,28 @@ public class GeoDistanceTests extends ESIntegTestCase { refresh(); - SearchResponse searchResponse1 = client().prepareSearch().addField("_source") + SearchResponse searchResponse1 = client().prepareSearch().addStoredField("_source") .addScriptField("distance", new Script("doc['location'].arcDistance(" + target_lat + "," + target_long + ")")).execute() .actionGet(); Double resultDistance1 = searchResponse1.getHits().getHits()[0].getFields().get("distance").getValue(); assertThat(resultDistance1, closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.DEFAULT), 0.01d)); - SearchResponse searchResponse2 = client().prepareSearch().addField("_source") + SearchResponse searchResponse2 = client().prepareSearch().addStoredField("_source") .addScriptField("distance", new Script("doc['location'].distance(" + target_lat + "," + target_long + ")")).execute() .actionGet(); Double resultDistance2 = searchResponse2.getHits().getHits()[0].getFields().get("distance").getValue(); assertThat(resultDistance2, closeTo(GeoDistance.PLANE.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.DEFAULT), 0.01d)); - SearchResponse searchResponse3 = client().prepareSearch().addField("_source") + SearchResponse searchResponse3 = client().prepareSearch().addStoredField("_source") .addScriptField("distance", new Script("doc['location'].arcDistanceInKm(" + target_lat + "," + target_long + ")")) .execute().actionGet(); Double resultArcDistance3 = searchResponse3.getHits().getHits()[0].getFields().get("distance").getValue(); assertThat(resultArcDistance3, closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.KILOMETERS), 0.01d)); - SearchResponse searchResponse4 = client().prepareSearch().addField("_source") + SearchResponse searchResponse4 = client().prepareSearch().addStoredField("_source") .addScriptField("distance", new Script("doc['location'].distanceInKm(" + target_lat + "," + target_long + ")")).execute() .actionGet(); Double resultDistance4 = searchResponse4.getHits().getHits()[0].getFields().get("distance").getValue(); @@ -102,7 +102,7 @@ public class GeoDistanceTests extends ESIntegTestCase { SearchResponse searchResponse5 = client() .prepareSearch() - .addField("_source") + .addStoredField("_source") .addScriptField("distance", new Script("doc['location'].arcDistanceInKm(" + (target_lat) + "," + (target_long + 360) + ")")) .execute().actionGet(); Double resultArcDistance5 = searchResponse5.getHits().getHits()[0].getFields().get("distance").getValue(); @@ -111,21 +111,21 @@ public class GeoDistanceTests extends ESIntegTestCase { SearchResponse searchResponse6 = client() .prepareSearch() - .addField("_source") + .addStoredField("_source") .addScriptField("distance", new Script("doc['location'].arcDistanceInKm(" + (target_lat + 360) + "," + (target_long) + ")")) .execute().actionGet(); Double resultArcDistance6 = searchResponse6.getHits().getHits()[0].getFields().get("distance").getValue(); assertThat(resultArcDistance6, closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.KILOMETERS), 0.01d)); - SearchResponse searchResponse7 = 
client().prepareSearch().addField("_source") + SearchResponse searchResponse7 = client().prepareSearch().addStoredField("_source") .addScriptField("distance", new Script("doc['location'].arcDistanceInMiles(" + target_lat + "," + target_long + ")")) .execute().actionGet(); Double resultDistance7 = searchResponse7.getHits().getHits()[0].getFields().get("distance").getValue(); assertThat(resultDistance7, closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.MILES), 0.01d)); - SearchResponse searchResponse8 = client().prepareSearch().addField("_source") + SearchResponse searchResponse8 = client().prepareSearch().addStoredField("_source") .addScriptField("distance", new Script("doc['location'].distanceInMiles(" + target_lat + "," + target_long + ")")) .execute().actionGet(); Double resultDistance8 = searchResponse8.getHits().getHits()[0].getFields().get("distance").getValue(); diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndexLookupTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndexLookupTests.java index 1cef890813f..f8cab2998dc 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndexLookupTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndexLookupTests.java @@ -423,7 +423,6 @@ public class IndexLookupTests extends ESIntegTestCase { .put("index.analysis.filter.delimited_int.encoding", "int") .put("index.analysis.filter.delimited_int.type", "delimited_payload_filter") .put("index.number_of_shards", 1))); - ensureYellow(); indexRandom(true, client().prepareIndex("test", "type1", "1").setSource("float_payload_field", "a|1 b|2 a|3 b "), client() .prepareIndex("test", "type1", "2").setSource("string_payload_field", "a|a b|b a|a b "), client().prepareIndex("test", "type1", "3").setSource("float_payload_field", "a|4 b|5 a|6 b "), diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/LongTermsTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/LongTermsTests.java index fa049429a3a..7bb1ed6fd65 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/LongTermsTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/LongTermsTests.java @@ -47,6 +47,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; +import java.util.Locale; import java.util.Map; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; @@ -629,6 +630,32 @@ public class LongTermsTests extends AbstractTermsTestCase { } } + public void testPartiallyUnmappedWithFormat() throws Exception { + SearchResponse response = client().prepareSearch("idx_unmapped", "idx").setTypes("type") + .addAggregation(terms("terms") + .field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .format("0000")) + .execute().actionGet(); + + assertSearchResponse(response); + + + Terms terms = response.getAggregations().get("terms"); + assertThat(terms, notNullValue()); + assertThat(terms.getName(), equalTo("terms")); + assertThat(terms.getBuckets().size(), equalTo(5)); + + for (int i = 0; i < 5; i++) { + String key = String.format(Locale.ROOT, "%04d", i); + Terms.Bucket bucket = terms.getBucketByKey(key); + assertThat(bucket, notNullValue()); + assertThat(key(bucket), equalTo(key)); + assertThat(bucket.getKeyAsNumber().intValue(), equalTo(i)); + assertThat(bucket.getDocCount(), equalTo(1L)); + } + } + 
public void testEmptyAggregation() throws Exception { SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") .setQuery(matchAllQuery()) diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java index 662d4d2f30c..640c00b291d 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MinDocCountTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.messy.tests; import com.carrotsearch.hppc.LongHashSet; import com.carrotsearch.hppc.LongSet; import com.carrotsearch.randomizedtesting.generators.RandomStrings; - import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; @@ -316,7 +315,8 @@ public class MinDocCountTests extends AbstractTermsTestCase { Thread.sleep(60000); logger.debug("1m passed. retrying."); testMinDocCountOnTerms(field, script, order, include, false); - } catch (Throwable secondFailure) { + } catch (Exception secondFailure) { + secondFailure.addSuppressed(ae); logger.error("exception on retry (will re-throw the original in a sec)", secondFailure); } throw ae; diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RandomScoreFunctionTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RandomScoreFunctionTests.java index 59a3602b1ca..6afa738569c 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RandomScoreFunctionTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/RandomScoreFunctionTests.java @@ -125,7 +125,6 @@ public class RandomScoreFunctionTests extends ESIntegTestCase { public void testScoreAccessWithinScript() throws Exception { assertAcked(prepareCreate("test").addMapping("type", "body", "type=text", "index", "type=" + randomFrom("short", "float", "long", "integer", "double"))); - ensureYellow(); int docCount = randomIntBetween(100, 200); for (int i = 0; i < docCount; i++) { diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java index 28894c5c1cc..03b85d57e0e 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SearchFieldsTests.java @@ -83,7 +83,6 @@ public class SearchFieldsTests extends ESIntegTestCase { public void testStoredFields() throws Exception { createIndex("test"); - client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForYellowStatus().execute().actionGet(); String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties") @@ -102,33 +101,33 @@ public class SearchFieldsTests extends ESIntegTestCase { client().admin().indices().prepareRefresh().execute().actionGet(); - SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addField("field1").execute().actionGet(); + SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("field1").execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); assertThat(searchResponse.getHits().hits().length, equalTo(1)); 
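Earlier in this stretch, the MinDocCountTests retry block now calls `secondFailure.addSuppressed(ae)`, so the original `AssertionError` survives even when the diagnostic retry fails too. A hedged sketch of that pattern, with hypothetical names:

```java
// Diagnostic retry: the original failure remains the primary error; if the
// retry also fails, that failure is logged with the original attached as a
// suppressed exception, and the original is still what gets thrown.
static void runWithDiagnosticRetry(Runnable test) throws InterruptedException {
    try {
        test.run();
    } catch (AssertionError ae) {
        try {
            Thread.sleep(60_000); // give the cluster a minute, then retry once
            test.run();
        } catch (Exception secondFailure) { // AssertionErrors on retry still propagate
            secondFailure.addSuppressed(ae);
            System.err.println("exception on retry (re-throwing the original): " + secondFailure);
        }
        throw ae; // always re-throw the original failure
    }
}
```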
assertThat(searchResponse.getHits().getAt(0).fields().size(), equalTo(1)); assertThat(searchResponse.getHits().getAt(0).fields().get("field1").value().toString(), equalTo("value1")); // field2 is not stored, check that it is not extracted from source. - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addField("field2").execute().actionGet(); + searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("field2").execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); assertThat(searchResponse.getHits().hits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).fields().size(), equalTo(0)); assertThat(searchResponse.getHits().getAt(0).fields().get("field2"), nullValue()); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addField("field3").execute().actionGet(); + searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("field3").execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); assertThat(searchResponse.getHits().hits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).fields().size(), equalTo(1)); assertThat(searchResponse.getHits().getAt(0).fields().get("field3").value().toString(), equalTo("value3")); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addField("*3").execute().actionGet(); + searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("*3").execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); assertThat(searchResponse.getHits().hits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).fields().size(), equalTo(1)); assertThat(searchResponse.getHits().getAt(0).fields().get("field3").value().toString(), equalTo("value3")); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addField("*3").addField("field1").addField("field2").execute().actionGet(); + searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("*3").addStoredField("field1").addStoredField("field2").execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); assertThat(searchResponse.getHits().hits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).fields().size(), equalTo(2)); @@ -136,20 +135,20 @@ public class SearchFieldsTests extends ESIntegTestCase { assertThat(searchResponse.getHits().getAt(0).fields().get("field1").value().toString(), equalTo("value1")); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addField("field*").execute().actionGet(); + searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("field*").execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); assertThat(searchResponse.getHits().hits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).fields().size(), equalTo(2)); assertThat(searchResponse.getHits().getAt(0).fields().get("field3").value().toString(), equalTo("value3")); assertThat(searchResponse.getHits().getAt(0).fields().get("field1").value().toString(), equalTo("value1")); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addField("f*3").execute().actionGet(); + searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("f*3").execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); assertThat(searchResponse.getHits().hits().length, equalTo(1)); 
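The SearchFieldsTests hunks here apply a mechanical API rename: `addField(...)` becomes `addStoredField(...)` on the search request builder. A hedged fragment in the same ESIntegTestCase style, with the surrounding test setup assumed:

```java
// old API (removed in this diff): .addField("field1")
SearchResponse resp = client().prepareSearch()
        .setQuery(matchAllQuery())
        .addStoredField("field1") // the new name says what it fetches: a *stored* field
        .get();
assertThat(resp.getHits().getAt(0).fields().get("field1").value().toString(), equalTo("value1"));
```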
assertThat(searchResponse.getHits().getAt(0).fields().size(), equalTo(1)); assertThat(searchResponse.getHits().getAt(0).fields().get("field3").value().toString(), equalTo("value3")); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addField("*").execute().actionGet(); + searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("*").execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); assertThat(searchResponse.getHits().hits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).source(), nullValue()); @@ -157,7 +156,7 @@ public class SearchFieldsTests extends ESIntegTestCase { assertThat(searchResponse.getHits().getAt(0).fields().get("field1").value().toString(), equalTo("value1")); assertThat(searchResponse.getHits().getAt(0).fields().get("field3").value().toString(), equalTo("value3")); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addField("*").addField("_source").execute().actionGet(); + searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("*").addStoredField("_source").execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); assertThat(searchResponse.getHits().hits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).source(), notNullValue()); @@ -168,7 +167,6 @@ public class SearchFieldsTests extends ESIntegTestCase { public void testScriptDocAndFields() throws Exception { createIndex("test"); - client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForYellowStatus().execute().actionGet(); String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("num1").field("type", "double").field("store", true).endObject() @@ -252,7 +250,6 @@ public class SearchFieldsTests extends ESIntegTestCase { public void testUidBasedScriptFields() throws Exception { prepareCreate("test").addMapping("type1", "num1", "type=long").execute().actionGet(); - ensureYellow(); int numDocs = randomIntBetween(1, 30); IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[numDocs]; @@ -328,7 +325,6 @@ public class SearchFieldsTests extends ESIntegTestCase { public void testScriptFieldUsingSource() throws Exception { createIndex("test"); - client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForYellowStatus().execute().actionGet(); client().prepareIndex("test", "type1", "1") .setSource(jsonBuilder().startObject() @@ -387,8 +383,6 @@ public class SearchFieldsTests extends ESIntegTestCase { public void testPartialFields() throws Exception { createIndex("test"); - client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForYellowStatus().execute().actionGet(); - client().prepareIndex("test", "type1", "1").setSource(XContentFactory.jsonBuilder().startObject() .field("field1", "value1") @@ -406,7 +400,6 @@ public class SearchFieldsTests extends ESIntegTestCase { public void testStoredFieldsWithoutSource() throws Exception { createIndex("test"); - client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForYellowStatus().execute().actionGet(); String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("_source").field("enabled", false).endObject().startObject("properties") .startObject("byte_field").field("type", "byte").field("store", true).endObject() @@ -437,15 +430,15 @@ public class SearchFieldsTests 
extends ESIntegTestCase { client().admin().indices().prepareRefresh().execute().actionGet(); SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery()) - .addField("byte_field") - .addField("short_field") - .addField("integer_field") - .addField("long_field") - .addField("float_field") - .addField("double_field") - .addField("date_field") - .addField("boolean_field") - .addField("binary_field") + .addStoredField("byte_field") + .addStoredField("short_field") + .addStoredField("integer_field") + .addStoredField("long_field") + .addStoredField("float_field") + .addStoredField("double_field") + .addStoredField("date_field") + .addStoredField("boolean_field") + .addStoredField("binary_field") .execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); @@ -465,7 +458,7 @@ public class SearchFieldsTests extends ESIntegTestCase { String dateTime = Joda.forPattern("dateOptionalTime").printer().print(new DateTime(2012, 3, 22, 0, 0, DateTimeZone.UTC)); assertThat(searchResponse.getHits().getAt(0).fields().get("date_field").value(), equalTo((Object) dateTime)); assertThat(searchResponse.getHits().getAt(0).fields().get("boolean_field").value(), equalTo((Object) Boolean.TRUE)); - assertThat(((BytesReference) searchResponse.getHits().getAt(0).fields().get("binary_field").value()).toBytesArray(), equalTo((BytesReference) new BytesArray("testing text".getBytes("UTF8")))); + assertThat(((BytesReference) searchResponse.getHits().getAt(0).fields().get("binary_field").value()), equalTo((BytesReference) new BytesArray("testing text".getBytes("UTF8")))); } @@ -478,7 +471,7 @@ public class SearchFieldsTests extends ESIntegTestCase { SearchResponse searchResponse = client().prepareSearch("my-index") .setTypes("my-type1") - .addField("field1").addField("_routing") + .addStoredField("field1").addStoredField("_routing") .get(); assertThat(searchResponse.getHits().totalHits(), equalTo(1L)); @@ -493,7 +486,7 @@ public class SearchFieldsTests extends ESIntegTestCase { .setRefreshPolicy(IMMEDIATE) .get(); - assertFailures(client().prepareSearch("my-index").setTypes("my-type1").addField("field1"), + assertFailures(client().prepareSearch("my-index").setTypes("my-type1").addStoredField("field1"), RestStatus.BAD_REQUEST, containsString("field [field1] isn't a leaf field")); } @@ -557,14 +550,14 @@ public class SearchFieldsTests extends ESIntegTestCase { String field = "field1.field2.field3.field4"; - SearchResponse searchResponse = client().prepareSearch("my-index").setTypes("my-type1").addField(field).get(); + SearchResponse searchResponse = client().prepareSearch("my-index").setTypes("my-type1").addStoredField(field).get(); assertThat(searchResponse.getHits().totalHits(), equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).field(field).isMetadataField(), equalTo(false)); assertThat(searchResponse.getHits().getAt(0).field(field).getValues().size(), equalTo(2)); assertThat(searchResponse.getHits().getAt(0).field(field).getValues().get(0).toString(), equalTo("value1")); assertThat(searchResponse.getHits().getAt(0).field(field).getValues().get(1).toString(), equalTo("value2")); - searchResponse = client().prepareSearch("my-index").setTypes("my-type2").addField(field).get(); + searchResponse = client().prepareSearch("my-index").setTypes("my-type2").addStoredField(field).get(); assertThat(searchResponse.getHits().totalHits(), equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).field(field).isMetadataField(), equalTo(false)); 
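A few hunks below, `testFieldsPulledFromFieldData` gets the companion rename, `addFieldDataField(...)` → `addDocValueField(...)`. A hedged fragment of the new call shape, same assumed test context as above:

```java
SearchRequestBuilder builder = client().prepareSearch()
        .setQuery(matchAllQuery())
        .addDocValueField("long_field")    // values are read from doc values,
        .addDocValueField("double_field"); // not from stored fields or _source
SearchResponse resp = builder.execute().actionGet();
```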
assertThat(searchResponse.getHits().getAt(0).field(field).getValues().size(), equalTo(2)); @@ -587,7 +580,6 @@ public class SearchFieldsTests extends ESIntegTestCase { public void testFieldsPulledFromFieldData() throws Exception { createIndex("test"); - client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForYellowStatus().execute().actionGet(); String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("_source").field("enabled", false).endObject().startObject("properties") .startObject("text_field").field("type", "text").field("fielddata", true).endObject() @@ -621,16 +613,16 @@ public class SearchFieldsTests extends ESIntegTestCase { client().admin().indices().prepareRefresh().execute().actionGet(); SearchRequestBuilder builder = client().prepareSearch().setQuery(matchAllQuery()) - .addFieldDataField("text_field") - .addFieldDataField("keyword_field") - .addFieldDataField("byte_field") - .addFieldDataField("short_field") - .addFieldDataField("integer_field") - .addFieldDataField("long_field") - .addFieldDataField("float_field") - .addFieldDataField("double_field") - .addFieldDataField("date_field") - .addFieldDataField("boolean_field"); + .addDocValueField("text_field") + .addDocValueField("keyword_field") + .addDocValueField("byte_field") + .addDocValueField("short_field") + .addDocValueField("integer_field") + .addDocValueField("long_field") + .addDocValueField("float_field") + .addDocValueField("double_field") + .addDocValueField("date_field") + .addDocValueField("boolean_field"); SearchResponse searchResponse = builder.execute().actionGet(); assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); @@ -704,7 +696,7 @@ public class SearchFieldsTests extends ESIntegTestCase { .setParent("parent_1") .setSource(jsonBuilder().startObject().field("field1", "value").endObject())); - SearchResponse response = client().prepareSearch("test").addField("field1").get(); + SearchResponse response = client().prepareSearch("test").addStoredField("field1").get(); assertSearchResponse(response); assertHitCount(response, 1); diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/package-info.java b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/package-info.java index ebc03bea2b3..867e6d29c88 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/package-info.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/messy/tests/package-info.java @@ -36,27 +36,19 @@ */ /* List of renames that took place: renamed: core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/AvgTests.java - renamed: core/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BucketScriptTests.java - renamed: core/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BucketSelectorTests.java renamed: core/src/test/java/org/elasticsearch/document/BulkIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/BulkTests.java - renamed: core/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/CardinalityTests.java renamed: core/src/test/java/org/elasticsearch/search/child/ChildQuerySearchIT.java -> 
plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ChildQuerySearchTests.java renamed: core/src/test/java/org/elasticsearch/transport/ContextAndHeaderTransportIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ContextAndHeaderTransportTests.java ^^^^^ note: the methods from this test using mustache were moved to the mustache module under its messy tests package. renamed: core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DateHistogramTests.java renamed: core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DateRangeTests.java renamed: core/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/DoubleTermsTests.java - renamed: core/src/test/java/org/elasticsearch/search/aggregations/EquivalenceIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/EquivalenceTests.java renamed: core/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/ExtendedStatsTests.java renamed: core/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/FunctionScoreTests.java renamed: core/src/test/java/org/elasticsearch/search/geo/GeoDistanceIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/GeoDistanceTests.java - renamed: core/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HDRPercentileRanksTests.java - renamed: core/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HDRPercentilesTests.java - renamed: core/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/HistogramTests.java renamed: core/src/test/java/org/elasticsearch/search/aggregations/bucket/IPv4RangeIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IPv4RangeTests.java renamed: core/src/test/java/org/elasticsearch/script/IndexLookupIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndexLookupTests.java renamed: core/src/test/java/org/elasticsearch/script/IndexedScriptIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndexedScriptTests.java - renamed: core/src/test/java/org/elasticsearch/action/IndicesRequestIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/IndicesRequestTests.java renamed: core/src/test/java/org/elasticsearch/search/innerhits/InnerHitsIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/InnerHitsTests.java renamed: core/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/LongTermsTests.java renamed: core/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/MaxTests.java @@ -76,8 +68,6 @@ renamed: core/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java -> 
plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/StatsTests.java renamed: core/src/test/java/org/elasticsearch/search/aggregations/bucket/StringTermsIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/StringTermsTests.java renamed: core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/SumTests.java - renamed: core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentileRanksTests.java - renamed: core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TDigestPercentilesTests.java renamed: core/src/test/java/org/elasticsearch/search/aggregations/bucket/TopHitsIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TopHitsTests.java renamed: core/src/test/java/org/elasticsearch/index/mapper/TransformOnIndexMapperIT.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/messy/tests/TransformOnIndexMapperTests.java renamed: core/src/main/java/org/elasticsearch/script/groovy/GroovyScriptCompilationException.java -> plugins/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyRestIT.java diff --git a/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyScriptTests.java b/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyScriptTests.java index 4a7b4350d23..f2eee2bb408 100644 --- a/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyScriptTests.java +++ b/modules/lang-groovy/src/test/java/org/elasticsearch/script/groovy/GroovyScriptTests.java @@ -99,15 +99,15 @@ public class GroovyScriptTests extends ESIntegTestCase { try { client().prepareSearch("test") - .setQuery(constantScoreQuery(scriptQuery(new Script("assert false", ScriptType.INLINE, "groovy", null)))).get(); + .setQuery(constantScoreQuery(scriptQuery(new Script("null.foo", ScriptType.INLINE, "groovy", null)))).get(); fail("should have thrown an exception"); } catch (SearchPhaseExecutionException e) { assertThat(e.toString() + "should not contained NotSerializableTransportException", e.toString().contains("NotSerializableTransportException"), equalTo(false)); assertThat(e.toString() + "should have contained ScriptException", e.toString().contains("ScriptException"), equalTo(true)); - assertThat(e.toString()+ "should have contained an assert error", - e.toString().contains("AssertionError[assert false"), equalTo(true)); + assertThat(e.toString()+ "should have contained a NullPointerException", + e.toString().contains("NullPointerException[Cannot get property 'foo' on null object]"), equalTo(true)); } } diff --git a/modules/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/15_update.yaml b/modules/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/15_update.yaml index 2a0b909f40c..683b2c6a2df 100644 --- a/modules/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/15_update.yaml +++ b/modules/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/15_update.yaml @@ -18,11 +18,11 @@ index: test_1 type: test id: 1 - script: "1" body: - lang: groovy - script: "ctx._source.foo = bar" - params: { bar: 'xxx' } + script: + lang: groovy + inline: "ctx._source.foo = bar" + params: { bar: 'xxx' } - match: { _index: test_1 } - match: { _type: test } @@ -43,8 +43,10 @@ index: 
test_1 type: test id: 1 - lang: groovy - script: "ctx._source.foo = 'yyy'" + body: + script: + lang: groovy + inline: "ctx._source.foo = 'yyy'" - match: { _index: test_1 } - match: { _type: test } @@ -67,9 +69,10 @@ type: test id: 1 body: - script: "1" - lang: "doesnotexist" - params: { bar: 'xxx' } + script: + inline: "1" + lang: "doesnotexist" + params: { bar: 'xxx' } - do: catch: /script_lang not supported \[doesnotexist\]/ @@ -77,6 +80,8 @@ index: test_1 type: test id: 1 - lang: doesnotexist - script: "1" + body: + script: + lang: doesnotexist + inline: "1" diff --git a/modules/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/25_script_upsert.yaml b/modules/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/25_script_upsert.yaml index 622bc633df0..3ab70d084fa 100644 --- a/modules/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/25_script_upsert.yaml +++ b/modules/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/25_script_upsert.yaml @@ -10,8 +10,9 @@ type: test id: 1 body: - script: "ctx._source.foo = bar" - params: { bar: 'xxx' } + script: + inline: "ctx._source.foo = bar" + params: { bar: 'xxx' } upsert: { foo: baz } - do: @@ -29,8 +30,9 @@ type: test id: 1 body: - script: "ctx._source.foo = bar" - params: { bar: 'xxx' } + script: + inline: "ctx._source.foo = bar" + params: { bar: 'xxx' } upsert: { foo: baz } - do: @@ -47,8 +49,9 @@ type: test id: 2 body: - script: "ctx._source.foo = bar" - params: { bar: 'xxx' } + script: + inline: "ctx._source.foo = bar" + params: { bar: 'xxx' } upsert: { foo: baz } scripted_upsert: true diff --git a/modules/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/90_missing.yaml b/modules/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/90_missing.yaml index 5f583973caf..c49565a6304 100644 --- a/modules/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/90_missing.yaml +++ b/modules/lang-groovy/src/test/resources/rest-api-spec/test/lang_groovy/90_missing.yaml @@ -31,8 +31,9 @@ type: test id: 1 body: - script: "ctx._source.foo = bar" - params: { bar: 'xxx' } + script: + inline: "ctx._source.foo = bar" + params: { bar: 'xxx' } - do: update: @@ -41,5 +42,6 @@ id: 1 ignore: 404 body: - script: "ctx._source.foo = bar" - params: { bar: 'xxx' } + script: + inline: "ctx._source.foo = bar" + params: { bar: 'xxx' } diff --git a/modules/lang-mustache/build.gradle b/modules/lang-mustache/build.gradle index eb5a48485b4..271faa23608 100644 --- a/modules/lang-mustache/build.gradle +++ b/modules/lang-mustache/build.gradle @@ -20,6 +20,7 @@ esplugin { description 'Mustache scripting integration for Elasticsearch' classname 'org.elasticsearch.script.mustache.MustachePlugin' + hasClientJar = true // For the template query } dependencies { diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/MultiSearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/MultiSearchTemplateResponse.java index c779757f61b..f9e99ffc7e3 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/MultiSearchTemplateResponse.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/MultiSearchTemplateResponse.java @@ -40,21 +40,21 @@ public class MultiSearchTemplateResponse extends ActionResponse implements Itera */ public static class Item implements Streamable { private SearchTemplateResponse response; - private Throwable throwable; + private Exception exception; 
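With `Item`'s `Throwable throwable` field narrowed to `Exception exception` (the accessors in the rest of this hunk follow suit, and serialization moves to `writeException`/`readException`), each item carries either a response or an exception, never both. A hedged consumer-side fragment, with the iteration and handler context assumed:

```java
for (MultiSearchTemplateResponse.Item item : multiResponse) {
    if (item.isFailure()) {
        // getFailure() now returns Exception rather than Throwable
        logger.warn("search template failed: {}", item.getFailureMessage());
    } else {
        handle(item.getResponse()); // handle(...) is a hypothetical callback
    }
}
```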
Item() { } - public Item(SearchTemplateResponse response, Throwable throwable) { + public Item(SearchTemplateResponse response, Exception exception) { this.response = response; - this.throwable = throwable; + this.exception = exception; } /** * Is it a failed search? */ public boolean isFailure() { - return throwable != null; + return exception != null; } /** @@ -62,7 +62,7 @@ public class MultiSearchTemplateResponse extends ActionResponse implements Itera */ @Nullable public String getFailureMessage() { - return throwable == null ? null : throwable.getMessage(); + return exception == null ? null : exception.getMessage(); } /** @@ -85,7 +85,7 @@ public class MultiSearchTemplateResponse extends ActionResponse implements Itera this.response = new SearchTemplateResponse(); response.readFrom(in); } else { - throwable = in.readThrowable(); + exception = in.readException(); } } @@ -96,12 +96,12 @@ public class MultiSearchTemplateResponse extends ActionResponse implements Itera response.writeTo(out); } else { out.writeBoolean(false); - out.writeThrowable(throwable); + out.writeException(exception); } } - public Throwable getFailure() { - return throwable; + public Exception getFailure() { + return exception; } } @@ -150,7 +150,7 @@ public class MultiSearchTemplateResponse extends ActionResponse implements Itera for (Item item : items) { builder.startObject(); if (item.isFailure()) { - ElasticsearchException.renderThrowable(builder, params, item.getFailure()); + ElasticsearchException.renderException(builder, params, item.getFailure()); } else { item.getResponse().toXContent(builder, params); } diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/TransportMultiSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/TransportMultiSearchTemplateAction.java index 1ffb19b5fc4..642fe7648da 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/TransportMultiSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/action/search/template/TransportMultiSearchTemplateAction.java @@ -61,7 +61,7 @@ public class TransportMultiSearchTemplateAction extends HandledTransportAction implements Iterable { + static final class ArrayMap extends AbstractMap implements Iterable { private final Object array; private final int length; @@ -109,7 +109,7 @@ final class CustomReflectionObjectHandler extends ReflectionObjectHandler { } - final static class CollectionMap extends AbstractMap implements Iterable { + static final class CollectionMap extends AbstractMap implements Iterable { private final Collection col; diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java index d3ffa13cd54..5d7b3b4fb56 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MustachePlugin.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.ScriptPlugin; +import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.rest.RestHandler; import org.elasticsearch.rest.action.search.template.RestDeleteSearchTemplateAction; import 
org.elasticsearch.rest.action.search.template.RestGetSearchTemplateAction; @@ -41,7 +42,9 @@ import org.elasticsearch.script.ScriptEngineService; import java.util.Arrays; import java.util.List; -public class MustachePlugin extends Plugin implements ScriptPlugin, ActionPlugin { +import static java.util.Collections.singletonList; + +public class MustachePlugin extends Plugin implements ScriptPlugin, ActionPlugin, SearchPlugin { @Override public ScriptEngineService getScriptEngineService(Settings settings) { @@ -54,6 +57,11 @@ public class MustachePlugin extends Plugin implements ScriptPlugin, ActionPlugin new ActionHandler<>(MultiSearchTemplateAction.INSTANCE, TransportMultiSearchTemplateAction.class)); } + @Override + public List> getQueries() { + return singletonList(new QuerySpec<>(TemplateQueryBuilder.NAME, TemplateQueryBuilder::new, TemplateQueryBuilder::fromXContent)); + } + @Override public List> getRestHandlers() { return Arrays.asList(RestSearchTemplateAction.class, RestMultiSearchTemplateAction.class, RestGetSearchTemplateAction.class, diff --git a/core/src/main/java/org/elasticsearch/index/query/TemplateQueryBuilder.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TemplateQueryBuilder.java similarity index 57% rename from core/src/main/java/org/elasticsearch/index/query/TemplateQueryBuilder.java rename to modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TemplateQueryBuilder.java index 3b47378d1e8..297ff841a1f 100644 --- a/core/src/main/java/org/elasticsearch/index/query/TemplateQueryBuilder.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TemplateQueryBuilder.java @@ -16,11 +16,9 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.index.query; +package org.elasticsearch.script.mustache; import org.apache.lucene.search.Query; -import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; @@ -28,14 +26,20 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.query.AbstractQueryBuilder; +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryRewriteContext; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; -import org.elasticsearch.script.Template; import java.io.IOException; import java.util.Collections; -import java.util.HashMap; import java.util.Map; import java.util.Objects; import java.util.Optional; @@ -44,67 +48,37 @@ import java.util.Optional; * Facilitates creating template query requests. * */ public class TemplateQueryBuilder extends AbstractQueryBuilder { - /** Name to reference this type of query. 
*/ public static final String NAME = "template"; - public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME); - - private final static Map parametersToTypes = new HashMap<>(); - static { - parametersToTypes.put("query", ScriptService.ScriptType.INLINE); - parametersToTypes.put("file", ScriptService.ScriptType.FILE); - parametersToTypes.put("id", ScriptService.ScriptType.STORED); - } /** Template to fill. */ - private final Template template; + private final Script template; - /** - * @param template - * the template to use for that query. - * */ - public TemplateQueryBuilder(Template template) { + public TemplateQueryBuilder(String template, ScriptService.ScriptType scriptType, Map params) { + this.template = new Script(template, scriptType, "mustache", params); + } + + public TemplateQueryBuilder(String template, ScriptService.ScriptType scriptType, Map params, XContentType ct) { + this.template = new Script(template, scriptType, "mustache", params, ct); + } + + // for tests, so that mock script can be used: + TemplateQueryBuilder(Script template) { if (template == null) { throw new IllegalArgumentException("query template cannot be null"); } this.template = template; } - public Template template() { + public Script template() { return template; } - /** - * @param template - * the template to use for that query. - * @param vars - * the parameters to fill the template with. - * @deprecated Use {@link #TemplateQueryBuilder(Template)} instead. - * */ - @Deprecated - public TemplateQueryBuilder(String template, Map vars) { - this(new Template(template, ScriptService.ScriptType.INLINE, null, null, vars)); - } - - /** - * @param template - * the template to use for that query. - * @param vars - * the parameters to fill the template with. - * @param templateType - * what kind of template (INLINE,FILE,ID) - * @deprecated Use {@link #TemplateQueryBuilder(Template)} instead. - * */ - @Deprecated - public TemplateQueryBuilder(String template, ScriptService.ScriptType templateType, Map vars) { - this(new Template(template, templateType, null, null, vars)); - } - /** * Read from a stream. */ public TemplateQueryBuilder(StreamInput in) throws IOException { super(in); - template = new Template(in); + template = new Script(in); } @Override @@ -118,42 +92,6 @@ public class TemplateQueryBuilder extends AbstractQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { - XContentParser parser = parseContext.parser(); - Template template = parse(parser, parseContext.getParseFieldMatcher()); - return Optional.of(new TemplateQueryBuilder(template)); - } - - public static Template parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher, String... parameters) throws IOException { - Map parameterMap = new HashMap<>(parametersToTypes); - for (String parameter : parameters) { - parameterMap.put(parameter, ScriptService.ScriptType.INLINE); - } - return parse(parser, parameterMap, parseFieldMatcher); - } - - public static Template parse(String defaultLang, XContentParser parser, - ParseFieldMatcher parseFieldMatcher, String... 
parameters) throws IOException { - Map parameterMap = new HashMap<>(parametersToTypes); - for (String parameter : parameters) { - parameterMap.put(parameter, ScriptService.ScriptType.INLINE); - } - return Template.parse(parser, parameterMap, defaultLang, parseFieldMatcher); - } - - public static Template parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException { - return parse(parser, parametersToTypes, parseFieldMatcher); - } - - public static Template parse(XContentParser parser, Map parameterMap, - ParseFieldMatcher parseFieldMatcher) throws IOException { - return Template.parse(parser, parameterMap, parseFieldMatcher); - } - @Override public String getWriteableName() { return NAME; @@ -182,7 +120,7 @@ public class TemplateQueryBuilder extends AbstractQueryBuilder new ParsingException(qSourceParser.getTokenLocation(), "inner query in [" + NAME + "] cannot be empty"));; + () -> new ParsingException(qSourceParser.getTokenLocation(), "inner query in [" + NAME + "] cannot be empty")); if (boost() != DEFAULT_BOOST || queryName() != null) { final BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder(); boolQueryBuilder.must(queryBuilder); @@ -191,4 +129,14 @@ public class TemplateQueryBuilder extends AbstractQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException { + XContentParser parser = parseContext.parser(); + Script template = Script.parse(parser, parseContext.getParseFieldMatcher(), "mustache"); + return Optional.of(new TemplateQueryBuilder(template)); + } } diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/action/search/template/MultiSearchTemplateIT.java b/modules/lang-mustache/src/test/java/org/elasticsearch/action/search/template/MultiSearchTemplateIT.java index 9c28225c081..03d04e518dc 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/action/search/template/MultiSearchTemplateIT.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/action/search/template/MultiSearchTemplateIT.java @@ -146,14 +146,14 @@ public class MultiSearchTemplateIT extends ESIntegTestCase { SearchTemplateResponse searchTemplateResponse1 = response1.getResponse(); assertThat(searchTemplateResponse1.hasResponse(), is(true)); assertHitCount(searchTemplateResponse1.getResponse(), (numDocs / 2) + (numDocs % 2)); - assertThat(searchTemplateResponse1.getSource().toUtf8(), + assertThat(searchTemplateResponse1.getSource().utf8ToString(), equalTo("{\"query\":{\"match\":{\"odd\":\"true\"}}}")); MultiSearchTemplateResponse.Item response2 = response.getResponses()[1]; assertThat(response2.isFailure(), is(false)); SearchTemplateResponse searchTemplateResponse2 = response2.getResponse(); assertThat(searchTemplateResponse2.hasResponse(), is(false)); - assertThat(searchTemplateResponse2.getSource().toUtf8(), + assertThat(searchTemplateResponse2.getSource().utf8ToString(), equalTo("{\"query\":{\"match_phrase_prefix\":{\"message\":\"quick brown f\"}}}")); MultiSearchTemplateResponse.Item response3 = response.getResponses()[2]; @@ -161,7 +161,7 @@ public class MultiSearchTemplateIT extends ESIntegTestCase { SearchTemplateResponse searchTemplateResponse3 = response3.getResponse(); assertThat(searchTemplateResponse3.hasResponse(), is(true)); assertHitCount(searchTemplateResponse3.getResponse(), (numDocs / 2)); - assertThat(searchTemplateResponse3.getSource().toUtf8(), + assertThat(searchTemplateResponse3.getSource().utf8ToString(), equalTo("{\"query\":{\"term\":{\"odd\":\"false\"}}}")); MultiSearchTemplateResponse.Item response4 = 
response.getResponses()[3]; @@ -173,7 +173,7 @@ public class MultiSearchTemplateIT extends ESIntegTestCase { assertThat(response5.isFailure(), is(false)); SearchTemplateResponse searchTemplateResponse5 = response5.getResponse(); assertThat(searchTemplateResponse5.hasResponse(), is(false)); - assertThat(searchTemplateResponse5.getSource().toUtf8(), + assertThat(searchTemplateResponse5.getSource().utf8ToString(), equalTo("{\"query\":{\"terms\":{\"group\":[1,2,3,]}}}")); } } diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java index 8d258865acb..9c09e9245f6 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryParserTests.java @@ -46,7 +46,7 @@ import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.index.query.TemplateQueryBuilder; +import org.elasticsearch.script.mustache.TemplateQueryBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionParser; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.similarity.SimilarityService; @@ -118,7 +118,7 @@ public class TemplateQueryParserTests extends ESTestCase { b.bind(CircuitBreakerService.class).to(NoneCircuitBreakerService.class); }, settingsModule, - new SearchModule(settings, new NamedWriteableRegistry()) { + new SearchModule(settings, new NamedWriteableRegistry(), false, emptyList()) { @Override protected void configureSearch() { // skip so we don't need transport @@ -153,7 +153,7 @@ public class TemplateQueryParserTests extends ESTestCase { } public void testParser() throws IOException { - String templateString = "{" + "\"query\":{\"match_{{template}}\": {}}," + "\"params\":{\"template\":\"all\"}" + "}"; + String templateString = "{" + "\"inline\":{\"match_{{template}}\": {}}," + "\"params\":{\"template\":\"all\"}" + "}"; XContentParser templateSourceParser = XContentFactory.xContent(templateString).createParser(templateString); QueryShardContext context = contextFactory.get(); diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryTests.java index 7ab58b7d1a7..90f73da8fb8 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/messy/tests/TemplateQueryTests.java @@ -26,20 +26,15 @@ import org.elasticsearch.action.search.template.SearchTemplateAction; import org.elasticsearch.action.search.template.SearchTemplateRequest; import org.elasticsearch.action.search.template.SearchTemplateRequestBuilder; import org.elasticsearch.action.search.template.SearchTemplateResponse; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.env.Environment; -import org.elasticsearch.index.query.QueryBuilders; -import 
org.elasticsearch.index.query.TemplateQueryBuilder; +import org.elasticsearch.script.mustache.TemplateQueryBuilder; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.action.search.template.RestSearchTemplateAction; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptService.ScriptType; -import org.elasticsearch.script.Template; import org.elasticsearch.script.mustache.MustachePlugin; import org.elasticsearch.script.mustache.MustacheScriptEngineService; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -98,8 +93,7 @@ public class TemplateQueryTests extends ESIntegTestCase { Map<String, Object> vars = new HashMap<>(); vars.put("template", "all"); - TemplateQueryBuilder builder = new TemplateQueryBuilder(new Template("{\"match_{{template}}\": {}}\"", ScriptType.INLINE, null, - null, vars)); + TemplateQueryBuilder builder = new TemplateQueryBuilder("{\"match_{{template}}\": {}}\"", ScriptType.INLINE, vars); SearchResponse sr = client().prepareSearch().setQuery(builder) .execute().actionGet(); assertHitCount(sr, 2); @@ -111,9 +105,8 @@ SearchResponse sr = client().prepareSearch() .setSource( new SearchSourceBuilder().size(0).query( - QueryBuilders.templateQuery(new Template("{ \"match_{{template}}\": {} }", - ScriptType.INLINE, null, null, params)))).execute() - .actionGet(); + new TemplateQueryBuilder("{ \"match_{{template}}\": {} }", ScriptType.INLINE, params))) + .get(); assertNoFailures(sr); assertThat(sr.getHits().hits().length, equalTo(0)); } @@ -121,8 +114,7 @@ public void testTemplateWOReplacementInBody() throws IOException { Map<String, Object> vars = new HashMap<>(); - TemplateQueryBuilder builder = new TemplateQueryBuilder(new Template( - "{\"match_all\": {}}\"", ScriptType.INLINE, null, null, vars)); + TemplateQueryBuilder builder = new TemplateQueryBuilder("{\"match_all\": {}}\"", ScriptType.INLINE, vars); SearchResponse sr = client().prepareSearch().setQuery(builder) .execute().actionGet(); assertHitCount(sr, 2); @@ -132,8 +124,7 @@ Map<String, Object> vars = new HashMap<>(); vars.put("template", "all"); - TemplateQueryBuilder builder = new TemplateQueryBuilder(new Template( - "storedTemplate", ScriptService.ScriptType.FILE, null, null, vars)); + TemplateQueryBuilder builder = new TemplateQueryBuilder("storedTemplate", ScriptService.ScriptType.FILE, vars); SearchResponse sr = client().prepareSearch().setQuery(builder) .execute().actionGet(); assertHitCount(sr, 2); @@ -142,7 +133,7 @@ public void testRawFSTemplate() throws IOException { Map<String, Object> params = new HashMap<>(); params.put("template", "all"); - TemplateQueryBuilder builder = new TemplateQueryBuilder(new Template("storedTemplate", ScriptType.FILE, null, null, params)); + TemplateQueryBuilder builder = new TemplateQueryBuilder("storedTemplate", ScriptType.FILE, params); SearchResponse sr = client().prepareSearch().setQuery(builder).get(); assertHitCount(sr, 2); } @@ -158,12 +149,6 @@ assertHitCount(response.getResponse(), 2); } - private Template parseTemplate(String template) throws IOException { - try (XContentParser parser = XContentFactory.xContent(template).createParser(template)) { - return TemplateQueryBuilder.parse(parser, ParseFieldMatcher.EMPTY, "params", "template"); - } - } - // Relates to #6318 public void
testSearchRequestFail() throws Exception { String query = "{ \"query\": {\"match_all\": {}}, \"size\" : \"{{my_size}}\" }"; @@ -420,8 +405,7 @@ public class TemplateQueryTests extends ESIntegTestCase { Map vars = new HashMap<>(); vars.put("fieldParam", "bar"); - TemplateQueryBuilder builder = new TemplateQueryBuilder(new Template( - "3", ScriptService.ScriptType.STORED, null, null, vars)); + TemplateQueryBuilder builder = new TemplateQueryBuilder("3", ScriptService.ScriptType.STORED, vars); SearchResponse sr = client().prepareSearch().setQuery(builder) .execute().actionGet(); assertHitCount(sr, 1); @@ -429,11 +413,11 @@ public class TemplateQueryTests extends ESIntegTestCase { // "{\"template\": {\"id\": \"3\",\"params\" : {\"fieldParam\" : \"foo\"}}}"; Map params = new HashMap<>(); params.put("fieldParam", "foo"); - TemplateQueryBuilder templateQuery = new TemplateQueryBuilder(new Template("3", ScriptType.STORED, null, null, params)); + TemplateQueryBuilder templateQuery = new TemplateQueryBuilder("3", ScriptType.STORED, params); sr = client().prepareSearch().setQuery(templateQuery).get(); assertHitCount(sr, 4); - templateQuery = new TemplateQueryBuilder(new Template("/mustache/3", ScriptType.STORED, null, null, params)); + templateQuery = new TemplateQueryBuilder("/mustache/3", ScriptType.STORED, params); sr = client().prepareSearch().setQuery(templateQuery).get(); assertHitCount(sr, 4); } diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java index 054268ef681..bdba0346a85 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheScriptEngineTests.java @@ -28,7 +28,6 @@ import org.junit.Before; import java.io.IOException; import java.io.StringWriter; -import java.nio.charset.Charset; import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -58,7 +57,7 @@ public class MustacheScriptEngineTests extends ESTestCase { BytesReference o = (BytesReference) qe.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "", "mustache", qe.compile(null, template, compileParams)), vars).run(); assertEquals("GET _search {\"query\": {\"boosting\": {\"positive\": {\"match\": {\"body\": \"gift\"}}," + "\"negative\": {\"term\": {\"body\": {\"value\": \"solr\"}}}, \"negative_boost\": 0.3 } }}", - new String(o.toBytes(), Charset.forName("UTF-8"))); + o.utf8ToString()); } { String template = "GET _search {\"query\": " + "{\"boosting\": {" + "\"positive\": {\"match\": {\"body\": \"gift\"}}," @@ -69,7 +68,7 @@ public class MustacheScriptEngineTests extends ESTestCase { BytesReference o = (BytesReference) qe.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "", "mustache", qe.compile(null, template, compileParams)), vars).run(); assertEquals("GET _search {\"query\": {\"boosting\": {\"positive\": {\"match\": {\"body\": \"gift\"}}," + "\"negative\": {\"term\": {\"body\": {\"value\": \"\\\"quick brown\\\"\"}}}, \"negative_boost\": 0.3 } }}", - new String(o.toBytes(), Charset.forName("UTF-8"))); + o.utf8ToString()); } } diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java index 8b6d0e69573..91098eb1c88 100644 --- 
a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MustacheTests.java @@ -72,7 +72,7 @@ public class MustacheTests extends ESTestCase { "Mustache templating broken", "GET _search {\"query\": {\"boosting\": {\"positive\": {\"match\": {\"body\": \"gift\"}}," + "\"negative\": {\"term\": {\"body\": {\"value\": \"solr\"}}}, \"negative_boost\": 0.2 } }}", - ((BytesReference) result.run()).toUtf8() + ((BytesReference) result.run()).utf8ToString() ); } @@ -88,7 +88,7 @@ public class MustacheTests extends ESTestCase { assertThat(output, notNullValue()); assertThat(output, instanceOf(BytesReference.class)); BytesReference bytes = (BytesReference) output; - assertThat(bytes.toUtf8(), equalTo("foo bar")); + assertThat(bytes.utf8ToString(), equalTo("foo bar")); // Sets can come out in any order Set setData = new HashSet<>(); @@ -99,7 +99,7 @@ public class MustacheTests extends ESTestCase { assertThat(output, notNullValue()); assertThat(output, instanceOf(BytesReference.class)); bytes = (BytesReference) output; - assertThat(bytes.toUtf8(), both(containsString("foo")).and(containsString("bar"))); + assertThat(bytes.utf8ToString(), both(containsString("foo")).and(containsString("bar"))); } public void testArrayInArrayAccess() throws Exception { @@ -116,7 +116,7 @@ public class MustacheTests extends ESTestCase { assertThat(output, notNullValue()); assertThat(output, instanceOf(BytesReference.class)); BytesReference bytes = (BytesReference) output; - assertThat(bytes.toUtf8(), equalTo("foo bar")); + assertThat(bytes.utf8ToString(), equalTo("foo bar")); } public void testMapInArrayAccess() throws Exception { @@ -131,7 +131,7 @@ public class MustacheTests extends ESTestCase { assertThat(output, notNullValue()); assertThat(output, instanceOf(BytesReference.class)); BytesReference bytes = (BytesReference) output; - assertThat(bytes.toUtf8(), equalTo("foo bar")); + assertThat(bytes.utf8ToString(), equalTo("foo bar")); // HashSet iteration order isn't fixed Set setData = new HashSet<>(); @@ -142,7 +142,7 @@ public class MustacheTests extends ESTestCase { assertThat(output, notNullValue()); assertThat(output, instanceOf(BytesReference.class)); bytes = (BytesReference) output; - assertThat(bytes.toUtf8(), both(containsString("foo")).and(containsString("bar"))); + assertThat(bytes.utf8ToString(), both(containsString("foo")).and(containsString("bar"))); } public void testEscaping() { @@ -152,7 +152,7 @@ public class MustacheTests extends ESTestCase { CompiledScript compiledScript = new CompiledScript(INLINE, "name", "mustache", mustache); ExecutableScript executableScript = engine.executable(compiledScript, Collections.singletonMap("value", "a \"value\"")); BytesReference rawResult = (BytesReference) executableScript.run(); - String result = rawResult.toUtf8(); + String result = rawResult.utf8ToString(); assertThat(result, equalTo("{ \"field1\": \"a \\\"value\\\"\"}")); // json string escaping disabled: @@ -160,7 +160,7 @@ public class MustacheTests extends ESTestCase { compiledScript = new CompiledScript(INLINE, "name", "mustache", mustache); executableScript = engine.executable(compiledScript, Collections.singletonMap("value", "a \"value\"")); rawResult = (BytesReference) executableScript.run(); - result = rawResult.toUtf8(); + result = rawResult.utf8ToString(); assertThat(result, equalTo("{ \"field1\": \"a \"value\"\"}")); } @@ -182,7 +182,7 @@ public class MustacheTests extends ESTestCase { 
BytesReference bytes = (BytesReference) output; String expectedString = String.format(Locale.ROOT, "%s %s", randomArrayValues.length, randomList.size()); - assertThat(bytes.toUtf8(), equalTo(expectedString)); + assertThat(bytes.utf8ToString(), equalTo(expectedString)); } public void testPrimitiveToJSON() throws Exception { @@ -378,7 +378,7 @@ public class MustacheTests extends ESTestCase { Object result = engine.executable(new CompiledScript(INLINE, "inline", "mustache", compile(script)), vars).run(); assertThat(result, notNullValue()); assertThat(result, instanceOf(BytesReference.class)); - assertThat(((BytesReference) result).toUtf8(), matcher); + assertThat(((BytesReference) result).utf8ToString(), matcher); } private Object compile(String script) { diff --git a/core/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/TemplateQueryBuilderTests.java similarity index 72% rename from core/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java rename to modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/TemplateQueryBuilderTests.java index d0548fd2780..96d6ce28dc1 100644 --- a/core/src/test/java/org/elasticsearch/index/query/TemplateQueryBuilderTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/TemplateQueryBuilderTests.java @@ -17,21 +17,29 @@ * under the License. */ -package org.elasticsearch.index.query; +package org.elasticsearch.script.mustache; import org.apache.lucene.index.memory.MemoryIndex; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; +import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.script.Script.ScriptParseException; +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.MatchQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.index.query.TermQueryBuilder; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; -import org.elasticsearch.script.Template; import org.elasticsearch.test.AbstractQueryTestCase; import org.junit.Before; import java.io.IOException; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -43,9 +51,14 @@ public class TemplateQueryBuilderTests extends AbstractQueryTestCase<TemplateQueryBuilder> { + @Override + protected Collection<Class<? extends Plugin>> getPlugins() { + return Collections.singleton(MustachePlugin.class); + } + @Before public void before() { - templateBase = RandomQueryBuilder.createQuery(random()); + templateBase = new MatchQueryBuilder("field", "some values"); } @Override @@ -55,7 +68,7 @@ public class TemplateQueryBuilderTests extends AbstractQueryTestCase<TemplateQueryBuilder> { - expectThrows(IllegalArgumentException.class, () -> new TemplateQueryBuilder((Template) null)); + expectThrows(IllegalArgumentException.class, () -> new TemplateQueryBuilder((Script) null)); } /** @@ -79,7 +92,7 @@ public class TemplateQueryBuilderTests extends AbstractQueryTestCase<TemplateQueryBuilder> { Map<String, Object> vars = new HashMap<>(); vars.put("template", "filled"); - TemplateQueryBuilder builder = new TemplateQueryBuilder( - new Template("I am a $template string", ScriptType.INLINE, null, null, vars)); +
TemplateQueryBuilder builder = new TemplateQueryBuilder("I am a $template string", ScriptType.INLINE, vars); XContentBuilder content = XContentFactory.jsonBuilder(); content.startObject(); builder.doXContent(content, null); @@ -100,29 +112,27 @@ public class TemplateQueryBuilderTests extends AbstractQueryTestCase params = new HashMap<>(); params.put("template", "all"); - QueryBuilder expectedBuilder = new TemplateQueryBuilder(new Template(expectedTemplateString, ScriptType.INLINE, null, null, - params)); + QueryBuilder expectedBuilder = new TemplateQueryBuilder(expectedTemplateString, ScriptType.INLINE, params); assertParsedQuery(query, expectedBuilder); } public void testRawTemplate() throws IOException { String expectedTemplateString = "{\"match_{{template}}\":{}}"; - String query = "{\"template\": {\"query\": {\"match_{{template}}\": {}},\"params\" : {\"template\" : \"all\"}}}"; + String query = "{\"template\": {\"inline\": {\"match_{{template}}\": {}},\"params\" : {\"template\" : \"all\"}}}"; Map params = new HashMap<>(); params.put("template", "all"); - QueryBuilder expectedBuilder = new TemplateQueryBuilder(new Template(expectedTemplateString, ScriptType.INLINE, null, - XContentType.JSON, params)); + QueryBuilder expectedBuilder = new TemplateQueryBuilder(expectedTemplateString, ScriptType.INLINE, params, XContentType.JSON); assertParsedQuery(query, expectedBuilder); } @Override public void testMustRewrite() throws IOException { String query = "{ \"match_all\" : {}}"; - QueryBuilder builder = new TemplateQueryBuilder(new Template(query, ScriptType.INLINE, "mockscript", - XContentType.JSON, Collections.emptyMap())); + QueryBuilder builder = new TemplateQueryBuilder(new Script(query, ScriptType.INLINE, "mockscript", + Collections.emptyMap(), XContentType.JSON)); try { builder.toQuery(createShardContext()); fail(); @@ -134,24 +144,24 @@ public class TemplateQueryBuilderTests extends AbstractQueryTestCase clazz) { + private void addStruct(final String name, final Class clazz) { if (!name.matches("^[_a-zA-Z][\\.,_a-zA-Z0-9]*$")) { throw new IllegalArgumentException("Invalid struct name [" + name + "]."); } @@ -661,7 +661,7 @@ public final class Definition { simpleTypesMap.put(name, getTypeInternal(name)); } - private final void addConstructorInternal(final String struct, final String name, final Type[] args) { + private void addConstructorInternal(final String struct, final String name, final Type[] args) { final Struct owner = structsMap.get(struct); if (owner == null) { @@ -734,7 +734,7 @@ public final class Definition { * * no spaces allowed. 
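* For example, "double max(double,double)" splits on its single space into a return type ("double") and a name-plus-arguments element ("max(double,double)"); any signature that does not yield exactly two elements is rejected as malformed (the concrete signature string here is illustrative, not taken from the whitelist).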
*/ - private final void addSignature(String className, String signature) { + private void addSignature(String className, String signature) { String elements[] = signature.split("\u0020"); if (elements.length != 2) { throw new IllegalArgumentException("Malformed signature: " + signature); @@ -774,8 +774,8 @@ public final class Definition { } } - private final void addMethodInternal(String struct, String name, boolean augmentation, - Type rtn, Type[] args) { + private void addMethodInternal(String struct, String name, boolean augmentation, + Type rtn, Type[] args) { final Struct owner = structsMap.get(struct); if (owner == null) { @@ -858,7 +858,7 @@ public final class Definition { } } - private final void addFieldInternal(String struct, String name, Type type) { + private void addFieldInternal(String struct, String name, Type type) { final Struct owner = structsMap.get(struct); if (owner == null) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java index ca549522018..c546207b1ee 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java @@ -43,37 +43,37 @@ import java.util.regex.Pattern; */ public final class WriterConstants { - public final static int CLASS_VERSION = Opcodes.V1_8; - public final static int ASM_VERSION = Opcodes.ASM5; - public final static String BASE_CLASS_NAME = Executable.class.getName(); - public final static Type BASE_CLASS_TYPE = Type.getType(Executable.class); + public static final int CLASS_VERSION = Opcodes.V1_8; + public static final int ASM_VERSION = Opcodes.ASM5; + public static final String BASE_CLASS_NAME = Executable.class.getName(); + public static final Type BASE_CLASS_TYPE = Type.getType(Executable.class); - public final static String CLASS_NAME = BASE_CLASS_NAME + "$Script"; - public final static Type CLASS_TYPE = Type.getObjectType(CLASS_NAME.replace('.', '/')); + public static final String CLASS_NAME = BASE_CLASS_NAME + "$Script"; + public static final Type CLASS_TYPE = Type.getObjectType(CLASS_NAME.replace('.', '/')); - public final static Method CONSTRUCTOR = getAsmMethod(void.class, "", String.class, String.class, BitSet.class); - public final static Method CLINIT = getAsmMethod(void.class, ""); - public final static Method EXECUTE = + public static final Method CONSTRUCTOR = getAsmMethod(void.class, "", String.class, String.class, BitSet.class); + public static final Method CLINIT = getAsmMethod(void.class, ""); + public static final Method EXECUTE = getAsmMethod(Object.class, "execute", Map.class, Scorer.class, LeafDocLookup.class, Object.class); - public final static Type PAINLESS_ERROR_TYPE = Type.getType(PainlessError.class); + public static final Type PAINLESS_ERROR_TYPE = Type.getType(PainlessError.class); - public final static Type NEEDS_SCORE_TYPE = Type.getType(NeedsScore.class); - public final static Type SCORER_TYPE = Type.getType(Scorer.class); - public final static Method SCORER_SCORE = getAsmMethod(float.class, "score"); + public static final Type NEEDS_SCORE_TYPE = Type.getType(NeedsScore.class); + public static final Type SCORER_TYPE = Type.getType(Scorer.class); + public static final Method SCORER_SCORE = getAsmMethod(float.class, "score"); - public final static Type MAP_TYPE = Type.getType(Map.class); - public final static Method MAP_GET = getAsmMethod(Object.class, "get", 
Object.class); + public static final Type MAP_TYPE = Type.getType(Map.class); + public static final Method MAP_GET = getAsmMethod(Object.class, "get", Object.class); - public final static Type ITERATOR_TYPE = Type.getType(Iterator.class); - public final static Method ITERATOR_HASNEXT = getAsmMethod(boolean.class, "hasNext"); - public final static Method ITERATOR_NEXT = getAsmMethod(Object.class, "next"); + public static final Type ITERATOR_TYPE = Type.getType(Iterator.class); + public static final Method ITERATOR_HASNEXT = getAsmMethod(boolean.class, "hasNext"); + public static final Method ITERATOR_NEXT = getAsmMethod(Object.class, "next"); - public final static Type UTILITY_TYPE = Type.getType(Utility.class); - public final static Method STRING_TO_CHAR = getAsmMethod(char.class, "StringTochar", String.class); - public final static Method CHAR_TO_STRING = getAsmMethod(String.class, "charToString", char.class); + public static final Type UTILITY_TYPE = Type.getType(Utility.class); + public static final Method STRING_TO_CHAR = getAsmMethod(char.class, "StringTochar", String.class); + public static final Method CHAR_TO_STRING = getAsmMethod(String.class, "charToString", char.class); - public final static Type METHOD_HANDLE_TYPE = Type.getType(MethodHandle.class); + public static final Type METHOD_HANDLE_TYPE = Type.getType(MethodHandle.class); public static final Type AUGMENTATION_TYPE = Type.getType(Augmentation.class); @@ -83,46 +83,46 @@ public final class WriterConstants { * because it can do it statically. This is both faster and prevents the script from doing something super slow like building a regex * per time it is run. */ - public final static Method PATTERN_COMPILE = getAsmMethod(Pattern.class, "compile", String.class, int.class); - public final static Method PATTERN_MATCHER = getAsmMethod(Matcher.class, "matcher", CharSequence.class); - public final static Method MATCHER_MATCHES = getAsmMethod(boolean.class, "matches"); - public final static Method MATCHER_FIND = getAsmMethod(boolean.class, "find"); + public static final Method PATTERN_COMPILE = getAsmMethod(Pattern.class, "compile", String.class, int.class); + public static final Method PATTERN_MATCHER = getAsmMethod(Matcher.class, "matcher", CharSequence.class); + public static final Method MATCHER_MATCHES = getAsmMethod(boolean.class, "matches"); + public static final Method MATCHER_FIND = getAsmMethod(boolean.class, "find"); /** dynamic callsite bootstrap signature */ - final static MethodType DEF_BOOTSTRAP_TYPE = + static final MethodType DEF_BOOTSTRAP_TYPE = MethodType.methodType(CallSite.class, MethodHandles.Lookup.class, String.class, MethodType.class, int.class, int.class, Object[].class); - final static Handle DEF_BOOTSTRAP_HANDLE = + static final Handle DEF_BOOTSTRAP_HANDLE = new Handle(Opcodes.H_INVOKESTATIC, Type.getInternalName(DefBootstrap.class), "bootstrap", DEF_BOOTSTRAP_TYPE.toMethodDescriptorString(), false); - public final static Type DEF_UTIL_TYPE = Type.getType(Def.class); - public final static Method DEF_TO_BOOLEAN = getAsmMethod(boolean.class, "DefToboolean" , Object.class); - public final static Method DEF_TO_BYTE_IMPLICIT = getAsmMethod(byte.class , "DefTobyteImplicit" , Object.class); - public final static Method DEF_TO_SHORT_IMPLICIT = getAsmMethod(short.class , "DefToshortImplicit" , Object.class); - public final static Method DEF_TO_CHAR_IMPLICIT = getAsmMethod(char.class , "DefTocharImplicit" , Object.class); - public final static Method DEF_TO_INT_IMPLICIT = getAsmMethod(int.class , 
"DefTointImplicit" , Object.class); - public final static Method DEF_TO_LONG_IMPLICIT = getAsmMethod(long.class , "DefTolongImplicit" , Object.class); - public final static Method DEF_TO_FLOAT_IMPLICIT = getAsmMethod(float.class , "DefTofloatImplicit" , Object.class); - public final static Method DEF_TO_DOUBLE_IMPLICIT = getAsmMethod(double.class , "DefTodoubleImplicit", Object.class); - public final static Method DEF_TO_BYTE_EXPLICIT = getAsmMethod(byte.class , "DefTobyteExplicit" , Object.class); - public final static Method DEF_TO_SHORT_EXPLICIT = getAsmMethod(short.class , "DefToshortExplicit" , Object.class); - public final static Method DEF_TO_CHAR_EXPLICIT = getAsmMethod(char.class , "DefTocharExplicit" , Object.class); - public final static Method DEF_TO_INT_EXPLICIT = getAsmMethod(int.class , "DefTointExplicit" , Object.class); - public final static Method DEF_TO_LONG_EXPLICIT = getAsmMethod(long.class , "DefTolongExplicit" , Object.class); - public final static Method DEF_TO_FLOAT_EXPLICIT = getAsmMethod(float.class , "DefTofloatExplicit" , Object.class); - public final static Method DEF_TO_DOUBLE_EXPLICIT = getAsmMethod(double.class , "DefTodoubleExplicit", Object.class); + public static final Type DEF_UTIL_TYPE = Type.getType(Def.class); + public static final Method DEF_TO_BOOLEAN = getAsmMethod(boolean.class, "DefToboolean" , Object.class); + public static final Method DEF_TO_BYTE_IMPLICIT = getAsmMethod(byte.class , "DefTobyteImplicit" , Object.class); + public static final Method DEF_TO_SHORT_IMPLICIT = getAsmMethod(short.class , "DefToshortImplicit" , Object.class); + public static final Method DEF_TO_CHAR_IMPLICIT = getAsmMethod(char.class , "DefTocharImplicit" , Object.class); + public static final Method DEF_TO_INT_IMPLICIT = getAsmMethod(int.class , "DefTointImplicit" , Object.class); + public static final Method DEF_TO_LONG_IMPLICIT = getAsmMethod(long.class , "DefTolongImplicit" , Object.class); + public static final Method DEF_TO_FLOAT_IMPLICIT = getAsmMethod(float.class , "DefTofloatImplicit" , Object.class); + public static final Method DEF_TO_DOUBLE_IMPLICIT = getAsmMethod(double.class , "DefTodoubleImplicit", Object.class); + public static final Method DEF_TO_BYTE_EXPLICIT = getAsmMethod(byte.class , "DefTobyteExplicit" , Object.class); + public static final Method DEF_TO_SHORT_EXPLICIT = getAsmMethod(short.class , "DefToshortExplicit" , Object.class); + public static final Method DEF_TO_CHAR_EXPLICIT = getAsmMethod(char.class , "DefTocharExplicit" , Object.class); + public static final Method DEF_TO_INT_EXPLICIT = getAsmMethod(int.class , "DefTointExplicit" , Object.class); + public static final Method DEF_TO_LONG_EXPLICIT = getAsmMethod(long.class , "DefTolongExplicit" , Object.class); + public static final Method DEF_TO_FLOAT_EXPLICIT = getAsmMethod(float.class , "DefTofloatExplicit" , Object.class); + public static final Method DEF_TO_DOUBLE_EXPLICIT = getAsmMethod(double.class , "DefTodoubleExplicit", Object.class); /** invokedynamic bootstrap for lambda expression/method references */ - public final static MethodType LAMBDA_BOOTSTRAP_TYPE = + public static final MethodType LAMBDA_BOOTSTRAP_TYPE = MethodType.methodType(CallSite.class, MethodHandles.Lookup.class, String.class, MethodType.class, Object[].class); - public final static Handle LAMBDA_BOOTSTRAP_HANDLE = + public static final Handle LAMBDA_BOOTSTRAP_HANDLE = new Handle(Opcodes.H_INVOKESTATIC, Type.getInternalName(LambdaMetafactory.class), "altMetafactory", 
LAMBDA_BOOTSTRAP_TYPE.toMethodDescriptorString(), false); /** dynamic invokedynamic bootstrap for indy string concats (Java 9+) */ - public final static Handle INDY_STRING_CONCAT_BOOTSTRAP_HANDLE; + public static final Handle INDY_STRING_CONCAT_BOOTSTRAP_HANDLE; static { Handle bs; try { @@ -139,24 +139,24 @@ public final class WriterConstants { INDY_STRING_CONCAT_BOOTSTRAP_HANDLE = bs; } - public final static int MAX_INDY_STRING_CONCAT_ARGS = 200; + public static final int MAX_INDY_STRING_CONCAT_ARGS = 200; - public final static Type STRING_TYPE = Type.getType(String.class); - public final static Type STRINGBUILDER_TYPE = Type.getType(StringBuilder.class); + public static final Type STRING_TYPE = Type.getType(String.class); + public static final Type STRINGBUILDER_TYPE = Type.getType(StringBuilder.class); - public final static Method STRINGBUILDER_CONSTRUCTOR = getAsmMethod(void.class, ""); - public final static Method STRINGBUILDER_APPEND_BOOLEAN = getAsmMethod(StringBuilder.class, "append", boolean.class); - public final static Method STRINGBUILDER_APPEND_CHAR = getAsmMethod(StringBuilder.class, "append", char.class); - public final static Method STRINGBUILDER_APPEND_INT = getAsmMethod(StringBuilder.class, "append", int.class); - public final static Method STRINGBUILDER_APPEND_LONG = getAsmMethod(StringBuilder.class, "append", long.class); - public final static Method STRINGBUILDER_APPEND_FLOAT = getAsmMethod(StringBuilder.class, "append", float.class); - public final static Method STRINGBUILDER_APPEND_DOUBLE = getAsmMethod(StringBuilder.class, "append", double.class); - public final static Method STRINGBUILDER_APPEND_STRING = getAsmMethod(StringBuilder.class, "append", String.class); - public final static Method STRINGBUILDER_APPEND_OBJECT = getAsmMethod(StringBuilder.class, "append", Object.class); - public final static Method STRINGBUILDER_TOSTRING = getAsmMethod(String.class, "toString"); + public static final Method STRINGBUILDER_CONSTRUCTOR = getAsmMethod(void.class, ""); + public static final Method STRINGBUILDER_APPEND_BOOLEAN = getAsmMethod(StringBuilder.class, "append", boolean.class); + public static final Method STRINGBUILDER_APPEND_CHAR = getAsmMethod(StringBuilder.class, "append", char.class); + public static final Method STRINGBUILDER_APPEND_INT = getAsmMethod(StringBuilder.class, "append", int.class); + public static final Method STRINGBUILDER_APPEND_LONG = getAsmMethod(StringBuilder.class, "append", long.class); + public static final Method STRINGBUILDER_APPEND_FLOAT = getAsmMethod(StringBuilder.class, "append", float.class); + public static final Method STRINGBUILDER_APPEND_DOUBLE = getAsmMethod(StringBuilder.class, "append", double.class); + public static final Method STRINGBUILDER_APPEND_STRING = getAsmMethod(StringBuilder.class, "append", String.class); + public static final Method STRINGBUILDER_APPEND_OBJECT = getAsmMethod(StringBuilder.class, "append", Object.class); + public static final Method STRINGBUILDER_TOSTRING = getAsmMethod(String.class, "toString"); - public final static Type OBJECTS_TYPE = Type.getType(Objects.class); - public final static Method EQUALS = getAsmMethod(boolean.class, "equals", Object.class, Object.class); + public static final Type OBJECTS_TYPE = Type.getType(Objects.class); + public static final Method EQUALS = getAsmMethod(boolean.class, "equals", Object.class, Object.class); private static Method getAsmMethod(final Class rtype, final String name, final Class... 
ptypes) { return new Method(name, MethodType.methodType(rtype, ptypes).toMethodDescriptorString()); diff --git a/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/15_update.yaml b/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/15_update.yaml index 555cbaee0cb..8e7e3d787e2 100644 --- a/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/15_update.yaml +++ b/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/15_update.yaml @@ -15,11 +15,11 @@ index: test_1 type: test id: 1 - script: "1" body: - lang: painless - script: "ctx._source.foo = params.bar" - params: { bar: 'xxx' } + script: + lang: painless + inline: "ctx._source.foo = params.bar" + params: { bar: 'xxx' } - match: { _index: test_1 } - match: { _type: test } @@ -40,8 +40,10 @@ index: test_1 type: test id: 1 - lang: painless - script: "ctx._source.foo = 'yyy'" + body: + script: + lang: painless + inline: "ctx._source.foo = 'yyy'" - match: { _index: test_1 } - match: { _type: test } diff --git a/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/25_script_upsert.yaml b/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/25_script_upsert.yaml index 66dee6bcc3a..2adf0de747f 100644 --- a/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/25_script_upsert.yaml +++ b/modules/lang-painless/src/test/resources/rest-api-spec/test/plan_a/25_script_upsert.yaml @@ -7,9 +7,10 @@ type: test id: 1 body: - script: "ctx._source.foo = params.bar" - lang: "painless" - params: { bar: 'xxx' } + script: + inline: "ctx._source.foo = params.bar" + lang: "painless" + params: { bar: 'xxx' } upsert: { foo: baz } - do: @@ -27,9 +28,10 @@ type: test id: 1 body: - script: "ctx._source.foo = params.bar" - lang: "painless" - params: { bar: 'xxx' } + script: + inline: "ctx._source.foo = params.bar" + lang: "painless" + params: { bar: 'xxx' } upsert: { foo: baz } - do: @@ -46,9 +48,10 @@ type: test id: 2 body: - script: "ctx._source.foo = params.bar" - lang: "painless" - params: { bar: 'xxx' } + script: + inline: "ctx._source.foo = params.bar" + lang: "painless" + params: { bar: 'xxx' } upsert: { foo: baz } scripted_upsert: true diff --git a/modules/percolator/build.gradle b/modules/percolator/build.gradle index 4e55fe25521..60fb82bdf4e 100644 --- a/modules/percolator/build.gradle +++ b/modules/percolator/build.gradle @@ -20,7 +20,8 @@ esplugin { description 'Percolator module adds capability to index queries and query these queries by specifying documents' classname 'org.elasticsearch.percolator.PercolatorPlugin' + hasClientJar = true } compileJava.options.compilerArgs << "-Xlint:-deprecation,-rawtypes" -compileTestJava.options.compilerArgs << "-Xlint:-deprecation,-rawtypes" \ No newline at end of file +compileTestJava.options.compilerArgs << "-Xlint:-deprecation,-rawtypes" diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/MultiPercolateAction.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/MultiPercolateAction.java index eefc6c996ba..d6eb5660728 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/MultiPercolateAction.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/MultiPercolateAction.java @@ -21,6 +21,7 @@ package org.elasticsearch.percolator; import org.elasticsearch.action.Action; import org.elasticsearch.client.ElasticsearchClient; +@Deprecated public class MultiPercolateAction extends Action { public static final MultiPercolateAction INSTANCE = new 
MultiPercolateAction(); diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/MultiPercolateResponse.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/MultiPercolateResponse.java index c05c0097c90..afc5b7ab6c7 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/MultiPercolateResponse.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/MultiPercolateResponse.java @@ -77,7 +77,7 @@ public class MultiPercolateResponse extends ActionResponse implements Iterablefalse is returned. */ public boolean isFailure() { - return throwable != null; + return exception != null; } - public Throwable getFailure() { - return throwable; + public Exception getFailure() { + return exception; } @Override @@ -161,7 +161,7 @@ public class MultiPercolateResponse extends ActionResponse implements Iterable { public static final PercolateAction INSTANCE = new PercolateAction(); diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java index 311b612a18d..40218e50a4f 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java @@ -22,13 +22,11 @@ package org.elasticsearch.percolator; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.Term; -import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorer; -import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; @@ -36,115 +34,39 @@ import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Bits; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.Lucene; -import org.elasticsearch.common.lucene.search.MatchNoDocsQuery; import java.io.IOException; import java.util.Objects; import java.util.Set; -import static org.apache.lucene.search.BooleanClause.Occur.FILTER; - -public final class PercolateQuery extends Query implements Accountable { +final class PercolateQuery extends Query implements Accountable { // cost of matching the query against the document, arbitrary as it would be really complex to estimate public static final float MATCH_COST = 1000; - public static class Builder { - - private final String docType; - private final QueryStore queryStore; - private final BytesReference documentSource; - private final IndexSearcher percolatorIndexSearcher; - - private Query queriesMetaDataQuery; - private Query verifiedQueriesQuery = new MatchNoDocsQuery(""); - private Query percolateTypeQuery; - - /** - * @param docType The type of the document being percolated - * @param queryStore The lookup holding all the percolator queries as Lucene queries. 
- * @param documentSource The source of the document being percolated - * @param percolatorIndexSearcher The index searcher on top of the in-memory index that holds the document being percolated - */ - public Builder(String docType, QueryStore queryStore, BytesReference documentSource, IndexSearcher percolatorIndexSearcher) { - this.docType = Objects.requireNonNull(docType); - this.queryStore = Objects.requireNonNull(queryStore); - this.documentSource = Objects.requireNonNull(documentSource); - this.percolatorIndexSearcher = Objects.requireNonNull(percolatorIndexSearcher); - } - - /** - * Optionally sets a query that reduces the number of queries to percolate based on extracted terms from - * the document to be percolated. - * @param extractedTermsFieldName The name of the field to get the extracted terms from - * @param extractionResultField The field to indicate for a document whether query term extraction was complete, - * partial or failed. If query extraction was complete, the MemoryIndex doesn't - */ - public void extractQueryTermsQuery(String extractedTermsFieldName, String extractionResultField) throws IOException { - // We can only skip the MemoryIndex verification when percolating a single document. - // When the document being percolated contains a nested object field then the MemoryIndex contains multiple - // documents. In this case the term query that indicates whether memory index verification can be skipped - // can incorrectly indicate that non nested queries would match, while their nested variants would not. - if (percolatorIndexSearcher.getIndexReader().maxDoc() == 1) { - this.verifiedQueriesQuery = new TermQuery(new Term(extractionResultField, ExtractQueryTermsService.EXTRACTION_COMPLETE)); - } - this.queriesMetaDataQuery = ExtractQueryTermsService.createQueryTermsQuery( - percolatorIndexSearcher.getIndexReader(), extractedTermsFieldName, - // include extractionResultField:failed, because docs with this term have no extractedTermsField - // and otherwise we would fail to return these docs. 
Docs that failed query term extraction - // always need to be verified by MemoryIndex: - new Term(extractionResultField, ExtractQueryTermsService.EXTRACTION_FAILED) - ); - } - - /** - * @param percolateTypeQuery A query that identifies all document containing percolator queries - */ - public void setPercolateTypeQuery(Query percolateTypeQuery) { - this.percolateTypeQuery = Objects.requireNonNull(percolateTypeQuery); - } - - public PercolateQuery build() { - if (percolateTypeQuery != null && queriesMetaDataQuery != null) { - throw new IllegalStateException("Either filter by deprecated percolator type or by query metadata"); - } - // The query that selects which percolator queries will be evaluated by MemoryIndex: - BooleanQuery.Builder queriesQuery = new BooleanQuery.Builder(); - if (percolateTypeQuery != null) { - queriesQuery.add(percolateTypeQuery, FILTER); - } - if (queriesMetaDataQuery != null) { - queriesQuery.add(queriesMetaDataQuery, FILTER); - } - return new PercolateQuery(docType, queryStore, documentSource, queriesQuery.build(), percolatorIndexSearcher, - verifiedQueriesQuery); - } - - } - private final String documentType; private final QueryStore queryStore; private final BytesReference documentSource; - private final Query percolatorQueriesQuery; - private final Query verifiedQueriesQuery; + private final Query candidateMatchesQuery; + private final Query verifiedMatchesQuery; private final IndexSearcher percolatorIndexSearcher; - private PercolateQuery(String documentType, QueryStore queryStore, BytesReference documentSource, - Query percolatorQueriesQuery, IndexSearcher percolatorIndexSearcher, Query verifiedQueriesQuery) { - this.documentType = documentType; - this.documentSource = documentSource; - this.percolatorQueriesQuery = percolatorQueriesQuery; - this.queryStore = queryStore; - this.percolatorIndexSearcher = percolatorIndexSearcher; - this.verifiedQueriesQuery = verifiedQueriesQuery; + PercolateQuery(String documentType, QueryStore queryStore, BytesReference documentSource, + Query candidateMatchesQuery, IndexSearcher percolatorIndexSearcher, Query verifiedMatchesQuery) { + this.documentType = Objects.requireNonNull(documentType); + this.documentSource = Objects.requireNonNull(documentSource); + this.candidateMatchesQuery = Objects.requireNonNull(candidateMatchesQuery); + this.queryStore = Objects.requireNonNull(queryStore); + this.percolatorIndexSearcher = Objects.requireNonNull(percolatorIndexSearcher); + this.verifiedMatchesQuery = Objects.requireNonNull(verifiedMatchesQuery); } @Override public Query rewrite(IndexReader reader) throws IOException { - Query rewritten = percolatorQueriesQuery.rewrite(reader); - if (rewritten != percolatorQueriesQuery) { + Query rewritten = candidateMatchesQuery.rewrite(reader); + if (rewritten != candidateMatchesQuery) { return new PercolateQuery(documentType, queryStore, documentSource, rewritten, percolatorIndexSearcher, - verifiedQueriesQuery); + verifiedMatchesQuery); } else { return this; } @@ -152,8 +74,8 @@ public final class PercolateQuery extends Query implements Accountable { @Override public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { - final Weight verifiedQueriesQueryWeight = verifiedQueriesQuery.createWeight(searcher, false); - final Weight innerWeight = percolatorQueriesQuery.createWeight(searcher, needsScores); + final Weight verifiedMatchesWeight = verifiedMatchesQuery.createWeight(searcher, false); + final Weight candidateMatchesWeight = 
candidateMatchesQuery.createWeight(searcher, false); return new Weight(this) { @Override public void extractTerms(Set set) { @@ -183,17 +105,17 @@ public final class PercolateQuery extends Query implements Accountable { @Override public float getValueForNormalization() throws IOException { - return innerWeight.getValueForNormalization(); + return candidateMatchesWeight.getValueForNormalization(); } @Override public void normalize(float v, float v1) { - innerWeight.normalize(v, v1); + candidateMatchesWeight.normalize(v, v1); } @Override public Scorer scorer(LeafReaderContext leafReaderContext) throws IOException { - final Scorer approximation = innerWeight.scorer(leafReaderContext); + final Scorer approximation = candidateMatchesWeight.scorer(leafReaderContext); if (approximation == null) { return null; } @@ -226,7 +148,7 @@ public final class PercolateQuery extends Query implements Accountable { } }; } else { - Scorer verifiedDocsScorer = verifiedQueriesQueryWeight.scorer(leafReaderContext); + Scorer verifiedDocsScorer = verifiedMatchesWeight.scorer(leafReaderContext); Bits verifiedDocsBits = Lucene.asSequentialAccessBits(leafReaderContext.reader().maxDoc(), verifiedDocsScorer); return new BaseScorer(this, approximation, queries, percolatorIndexSearcher) { @@ -292,19 +214,13 @@ public final class PercolateQuery extends Query implements Accountable { @Override public String toString(String s) { - return "PercolateQuery{document_type={" + documentType + "},document_source={" + documentSource.toUtf8() + - "},inner={" + percolatorQueriesQuery.toString(s) + "}}"; + return "PercolateQuery{document_type={" + documentType + "},document_source={" + documentSource.utf8ToString() + + "},inner={" + candidateMatchesQuery.toString(s) + "}}"; } @Override public long ramBytesUsed() { - long sizeInBytes = 0; - if (documentSource.hasArray()) { - sizeInBytes += documentSource.array().length; - } else { - sizeInBytes += documentSource.length(); - } - return sizeInBytes; + return documentSource.ramBytesUsed(); } @FunctionalInterface @@ -321,7 +237,7 @@ public final class PercolateQuery extends Query implements Accountable { } - static abstract class BaseScorer extends Scorer { + abstract static class BaseScorer extends Scorer { final Scorer approximation; final QueryStore.Leaf percolatorQueries; diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java index 3ce3726aa5e..3acb5c1f68c 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java @@ -50,6 +50,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lucene.search.MatchNoDocsQuery; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -57,7 +58,6 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.FieldNameAnalyzer; 
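In the createWeight hunk above, candidateMatchesQuery only drives a cheap approximation; each stored query is actually run against the MemoryIndex holding the percolated document inside the two-phase matches() check, and candidates covered by verifiedMatchesQuery (complete term extraction) skip that verification entirely. A rough sketch of the control flow, with assumed shapes for QueryStore.Leaf and the enclosing scorer (the patch's BaseScorer subclasses differ in detail):

TwoPhaseIterator twoPhase = new TwoPhaseIterator(approximation.iterator()) {
    @Override
    public boolean matches() throws IOException {
        int docId = approximation.docID();
        if (verifiedDocsBits.get(docId)) {
            return true; // extraction was complete, so the candidate is a guaranteed match
        }
        Query query = percolatorQueries.getQuery(docId); // assumed accessor on QueryStore.Leaf
        return query != null && Lucene.exists(percolatorIndexSearcher, query);
    }

    @Override
    public float matchCost() {
        return MATCH_COST; // evaluating a stored query against the MemoryIndex is expensive
    }
};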
import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperForType; @@ -81,9 +81,7 @@ import static org.elasticsearch.index.mapper.SourceToParse.source; import static org.elasticsearch.percolator.PercolatorFieldMapper.parseQuery; public class PercolateQueryBuilder extends AbstractQueryBuilder { - public static final String NAME = "percolate"; - public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME); static final ParseField DOCUMENT_FIELD = new ParseField("document"); private static final ParseField QUERY_FIELD = new ParseField("field"); @@ -406,37 +404,27 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder { LeafReader leafReader = ctx.reader(); - BinaryDocValues binaryDocValues = leafReader.getBinaryDocValues(fieldType.getQueryBuilderFieldName()); + BinaryDocValues binaryDocValues = leafReader.getBinaryDocValues(fieldType.queryBuilderField.name()); if (binaryDocValues == null) { return docId -> null; } - Bits bits = leafReader.getDocsWithField(fieldType.getQueryBuilderFieldName()); + Bits bits = leafReader.getDocsWithField(fieldType.queryBuilderField.name()); return docId -> { if (bits.get(docId)) { BytesRef qbSource = binaryDocValues.get(docId); @@ -541,7 +529,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING = + public static final XContentType QUERY_BUILDER_CONTENT_TYPE = XContentType.SMILE; + public static final Setting INDEX_MAP_UNMAPPED_FIELDS_AS_STRING_SETTING = Setting.boolSetting("index.percolator.map_unmapped_fields_as_string", false, Setting.Property.IndexScope); public static final String CONTENT_TYPE = "percolator"; - private static final PercolatorFieldType FIELD_TYPE = new PercolatorFieldType(); + private static final FieldType FIELD_TYPE = new FieldType(); + + static final byte FIELD_VALUE_SEPARATOR = 0; // nul code point + static final String EXTRACTION_COMPLETE = "complete"; + static final String EXTRACTION_PARTIAL = "partial"; + static final String EXTRACTION_FAILED = "failed"; public static final String EXTRACTED_TERMS_FIELD_NAME = "extracted_terms"; public static final String EXTRACTION_RESULT_FIELD_NAME = "extraction_result"; @@ -73,12 +97,13 @@ public class PercolatorFieldMapper extends FieldMapper { @Override public PercolatorFieldMapper build(BuilderContext context) { context.path().add(name()); + FieldType fieldType = (FieldType) this.fieldType; KeywordFieldMapper extractedTermsField = createExtractQueryFieldBuilder(EXTRACTED_TERMS_FIELD_NAME, context); - ((PercolatorFieldType) fieldType).queryTermsField = extractedTermsField.fieldType(); + fieldType.queryTermsField = extractedTermsField.fieldType(); KeywordFieldMapper extractionResultField = createExtractQueryFieldBuilder(EXTRACTION_RESULT_FIELD_NAME, context); - ((PercolatorFieldType) fieldType).extractionResultField = extractionResultField.fieldType(); + fieldType.extractionResultField = extractionResultField.fieldType(); BinaryFieldMapper queryBuilderField = createQueryBuilderFieldBuilder(context); - ((PercolatorFieldType) fieldType).queryBuilderField = queryBuilderField.fieldType(); + fieldType.queryBuilderField = queryBuilderField.fieldType(); context.path().remove(); setupFieldType(context); return new PercolatorFieldMapper(name(), fieldType, defaultFieldType, context.indexSettings(), @@ -113,40 +138,28 @@ public class PercolatorFieldMapper extends FieldMapper { } } - public static class PercolatorFieldType extends MappedFieldType { + public static class FieldType 
extends MappedFieldType { - private MappedFieldType queryTermsField; - private MappedFieldType extractionResultField; - private MappedFieldType queryBuilderField; + MappedFieldType queryTermsField; + MappedFieldType extractionResultField; + MappedFieldType queryBuilderField; - public PercolatorFieldType() { + public FieldType() { setIndexOptions(IndexOptions.NONE); setDocValuesType(DocValuesType.NONE); setStored(false); } - public PercolatorFieldType(PercolatorFieldType ref) { + public FieldType(FieldType ref) { super(ref); queryTermsField = ref.queryTermsField; extractionResultField = ref.extractionResultField; queryBuilderField = ref.queryBuilderField; } - public String getExtractedTermsField() { - return queryTermsField.name(); - } - - public String getExtractionResultFieldName() { - return extractionResultField.name(); - } - - public String getQueryBuilderFieldName() { - return queryBuilderField.name(); - } - @Override public MappedFieldType clone() { - return new PercolatorFieldType(this); + return new FieldType(this); } @Override @@ -158,6 +171,52 @@ public class PercolatorFieldMapper extends FieldMapper { public Query termQuery(Object value, QueryShardContext context) { throw new QueryShardException(context, "Percolator fields are not searchable directly, use a percolate query instead"); } + + public Query percolateQuery(String documentType, PercolateQuery.QueryStore queryStore, BytesReference documentSource, + IndexSearcher searcher) throws IOException { + IndexReader indexReader = searcher.getIndexReader(); + Query candidateMatchesQuery = createCandidateQuery(indexReader); + Query verifiedMatchesQuery; + // We can only skip the MemoryIndex verification when percolating a single document. + // When the document being percolated contains a nested object field, the MemoryIndex contains multiple + // documents. In this case the term query that indicates whether memory index verification can be skipped + // can incorrectly indicate that non-nested queries would match, while their nested variants would not. + if (indexReader.maxDoc() == 1) { + verifiedMatchesQuery = new TermQuery(new Term(extractionResultField.name(), EXTRACTION_COMPLETE)); + } else { + verifiedMatchesQuery = new MatchNoDocsQuery("nested docs, so no verified matches"); + } + return new PercolateQuery(documentType, queryStore, documentSource, candidateMatchesQuery, searcher, verifiedMatchesQuery); + } + + Query createCandidateQuery(IndexReader indexReader) throws IOException { + List extractedTerms = new ArrayList<>(); + // include extractionResultField:failed, because docs with this term have no extractedTermsField + // and otherwise we would fail to return these docs.
Docs that failed query term extraction + // always need to be verified by MemoryIndex: + extractedTerms.add(new Term(extractionResultField.name(), EXTRACTION_FAILED)); + + LeafReader reader = indexReader.leaves().get(0).reader(); + Fields fields = reader.fields(); + for (String field : fields) { + Terms terms = fields.terms(field); + if (terms == null) { + continue; + } + + BytesRef fieldBr = new BytesRef(field); + TermsEnum tenum = terms.iterator(); + for (BytesRef term = tenum.next(); term != null; term = tenum.next()) { + // join the field name and term with the NUL separator, the same encoding used when indexing the extracted query terms + BytesRefBuilder builder = new BytesRefBuilder(); + builder.append(fieldBr); + builder.append(FIELD_VALUE_SEPARATOR); + builder.append(term); + extractedTerms.add(new Term(queryTermsField.name(), builder.toBytesRef())); + } + } + return new TermsQuery(extractedTerms); + } + } private final boolean mapUnmappedFieldAsString; @@ -210,22 +269,46 @@ public class PercolatorFieldMapper extends FieldMapper { XContentParser parser = context.parser(); QueryBuilder queryBuilder = parseQueryBuilder(queryShardContext.newParseContext(parser), parser.getTokenLocation()); + verifyRangeQueries(queryBuilder); // Fetching of terms, shapes and indexed scripts happen during this rewrite: queryBuilder = queryBuilder.rewrite(queryShardContext); try (XContentBuilder builder = XContentFactory.contentBuilder(QUERY_BUILDER_CONTENT_TYPE)) { queryBuilder.toXContent(builder, new MapParams(Collections.emptyMap())); builder.flush(); - byte[] queryBuilderAsBytes = builder.bytes().toBytes(); + byte[] queryBuilderAsBytes = BytesReference.toBytes(builder.bytes()); context.doc().add(new Field(queryBuilderField.name(), queryBuilderAsBytes, queryBuilderField.fieldType())); } Query query = toQuery(queryShardContext, mapUnmappedFieldAsString, queryBuilder); - ExtractQueryTermsService.extractQueryTerms(query, context.doc(), queryTermsField.name(), extractionResultField.name(), - queryTermsField.fieldType()); + processQuery(query, context); return null; } + void processQuery(Query query, ParseContext context) { + ParseContext.Document doc = context.doc(); + FieldType pft = (FieldType) this.fieldType(); + QueryAnalyzer.Result result; + try { + result = QueryAnalyzer.analyze(query); + } catch (QueryAnalyzer.UnsupportedQueryException e) { + doc.add(new Field(pft.extractionResultField.name(), EXTRACTION_FAILED, extractionResultField.fieldType())); + return; + } + for (Term term : result.terms) { + BytesRefBuilder builder = new BytesRefBuilder(); + builder.append(new BytesRef(term.field())); + builder.append(FIELD_VALUE_SEPARATOR); + builder.append(term.bytes()); + doc.add(new Field(queryTermsField.name(), builder.toBytesRef(), queryTermsField.fieldType())); + } + if (result.verified) { + doc.add(new Field(extractionResultField.name(), EXTRACTION_COMPLETE, extractionResultField.fieldType())); + } else { + doc.add(new Field(extractionResultField.name(), EXTRACTION_PARTIAL, extractionResultField.fieldType())); + } + } + public static Query parseQuery(QueryShardContext context, boolean mapUnmappedFieldsAsString, XContentParser parser) throws IOException { return toQuery(context, mapUnmappedFieldsAsString, parseQueryBuilder(context.newParseContext(parser), parser.getTokenLocation())); } @@ -272,4 +355,38 @@ return CONTENT_TYPE; } + /** + * Fails if a range query with a date range based on the current time is found + */ + static void verifyRangeQueries(QueryBuilder queryBuilder) { + if (queryBuilder instanceof RangeQueryBuilder) { + RangeQueryBuilder rangeQueryBuilder =
(RangeQueryBuilder) queryBuilder; + if (rangeQueryBuilder.from() instanceof String) { + String from = (String) rangeQueryBuilder.from(); + String to = (String) rangeQueryBuilder.to(); + if (from.contains("now") || to.contains("now")) { + throw new IllegalArgumentException("Percolator queries containing time range queries based on the " + + "current time are forbidden"); + } + } + } else if (queryBuilder instanceof BoolQueryBuilder) { + BoolQueryBuilder boolQueryBuilder = (BoolQueryBuilder) queryBuilder; + List clauses = new ArrayList<>(); + clauses.addAll(boolQueryBuilder.filter()); + clauses.addAll(boolQueryBuilder.must()); + clauses.addAll(boolQueryBuilder.mustNot()); + clauses.addAll(boolQueryBuilder.should()); + for (QueryBuilder clause : clauses) { + verifyRangeQueries(clause); + } + } else if (queryBuilder instanceof ConstantScoreQueryBuilder) { + verifyRangeQueries(((ConstantScoreQueryBuilder) queryBuilder).innerQuery()); + } else if (queryBuilder instanceof FunctionScoreQueryBuilder) { + verifyRangeQueries(((FunctionScoreQueryBuilder) queryBuilder).query()); + } else if (queryBuilder instanceof BoostingQueryBuilder) { + verifyRangeQueries(((BoostingQueryBuilder) queryBuilder).negativeQuery()); + verifyRangeQueries(((BoostingQueryBuilder) queryBuilder).positiveQuery()); + } + } + } diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java index 4a110172d77..2e9fd517d53 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java @@ -25,15 +25,17 @@ import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.DisjunctionMaxQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.text.Text; import org.elasticsearch.index.query.ParsedQuery; -import org.elasticsearch.search.Highlighters; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.highlight.HighlightPhase; +import org.elasticsearch.search.highlight.Highlighter; import org.elasticsearch.search.highlight.SearchContextHighlight; import org.elasticsearch.search.internal.InternalSearchHit; import org.elasticsearch.search.internal.SearchContext; @@ -42,6 +44,7 @@ import org.elasticsearch.search.internal.SubSearchContext; import java.io.IOException; import java.util.Collections; import java.util.List; +import java.util.Map; /** * Highlighting in the case of the percolate query is a bit different, because the PercolateQuery itself doesn't get highlighted, @@ -49,7 +52,7 @@ import java.util.List; */ public final class PercolatorHighlightSubFetchPhase extends HighlightPhase { - public PercolatorHighlightSubFetchPhase(Settings settings, Highlighters highlighters) { + public PercolatorHighlightSubFetchPhase(Settings settings, Map highlighters) { super(settings, highlighters); } @@ -110,10 +113,19 @@ public final class PercolatorHighlightSubFetchPhase extends HighlightPhase { return result; } } + } 
else if (query instanceof DisjunctionMaxQuery) { + for (Query disjunct : ((DisjunctionMaxQuery) query).getDisjuncts()) { + PercolateQuery result = locatePercolatorQuery(disjunct); + if (result != null) { + return result; + } + } } else if (query instanceof ConstantScoreQuery) { return locatePercolatorQuery(((ConstantScoreQuery) query).getQuery()); } else if (query instanceof BoostQuery) { return locatePercolatorQuery(((BoostQuery) query).getQuery()); + } else if (query instanceof FunctionScoreQuery) { + return locatePercolatorQuery(((FunctionScoreQuery) query).getSubQuery()); } return null; diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorPlugin.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorPlugin.java index 4359568b3f6..8b602e3c478 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorPlugin.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorPlugin.java @@ -27,15 +27,18 @@ import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.rest.RestHandler; -import org.elasticsearch.search.SearchModule; +import org.elasticsearch.search.fetch.FetchSubPhase; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; -public class PercolatorPlugin extends Plugin implements MapperPlugin, ActionPlugin { +import static java.util.Collections.singletonList; + +public class PercolatorPlugin extends Plugin implements MapperPlugin, ActionPlugin, SearchPlugin { private final Settings settings; @@ -54,9 +57,14 @@ public class PercolatorPlugin extends Plugin implements MapperPlugin, ActionPlug return Arrays.asList(RestPercolateAction.class, RestMultiPercolateAction.class); } - public void onModule(SearchModule module) { - module.registerQuery(PercolateQueryBuilder::new, PercolateQueryBuilder::fromXContent, PercolateQueryBuilder.QUERY_NAME_FIELD); - module.registerFetchSubPhase(new PercolatorHighlightSubFetchPhase(settings, module.getHighlighters())); + @Override + public List> getQueries() { + return singletonList(new QuerySpec<>(PercolateQueryBuilder.NAME, PercolateQueryBuilder::new, PercolateQueryBuilder::fromXContent)); + } + + @Override + public List getFetchSubPhases(FetchPhaseConstructionContext context) { + return singletonList(new PercolatorHighlightSubFetchPhase(settings, context.getHighlighters())); } @Override diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/ExtractQueryTermsService.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java similarity index 72% rename from modules/percolator/src/main/java/org/elasticsearch/percolator/ExtractQueryTermsService.java rename to modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java index 147eae6e4d1..8154c632907 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/ExtractQueryTermsService.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java @@ -18,15 +18,8 @@ */ package org.elasticsearch.percolator; -import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; -import org.apache.lucene.index.Fields; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.MultiFields; import 
org.apache.lucene.index.PrefixCodedTerms; import org.apache.lucene.index.Term; -import org.apache.lucene.index.Terms; -import org.apache.lucene.index.TermsEnum; import org.apache.lucene.queries.BlendedTermQuery; import org.apache.lucene.queries.CommonTermsQuery; import org.apache.lucene.queries.TermsQuery; @@ -46,36 +39,25 @@ import org.apache.lucene.search.spans.SpanOrQuery; import org.apache.lucene.search.spans.SpanQuery; import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefBuilder; import org.elasticsearch.common.logging.LoggerMessageFormat; import org.elasticsearch.common.lucene.search.MatchNoDocsQuery; -import org.elasticsearch.index.mapper.ParseContext; +import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; -import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.Objects; import java.util.Set; import java.util.function.Function; -/** - * Utility to extract query terms from queries and create queries from documents. - */ -public final class ExtractQueryTermsService { +public final class QueryAnalyzer { - private static final byte FIELD_VALUE_SEPARATOR = 0; // nul code point - public static final String EXTRACTION_COMPLETE = "complete"; - public static final String EXTRACTION_PARTIAL = "partial"; - public static final String EXTRACTION_FAILED = "failed"; - - static final Map, Function> queryProcessors; + private static final Map, Function> queryProcessors; static { - Map, Function> map = new HashMap<>(16); + Map, Function> map = new HashMap<>(); map.put(MatchNoDocsQuery.class, matchNoDocsQuery()); map.put(ConstantScoreQuery.class, constantScoreQuery()); map.put(BoostQuery.class, boostQuery()); @@ -92,86 +74,38 @@ public final class ExtractQueryTermsService { map.put(BooleanQuery.class, booleanQuery()); map.put(DisjunctionMaxQuery.class, disjunctionMaxQuery()); map.put(SynonymQuery.class, synonymQuery()); + map.put(FunctionScoreQuery.class, functionScoreQuery()); queryProcessors = Collections.unmodifiableMap(map); } - private ExtractQueryTermsService() { + private QueryAnalyzer() { } /** - * Extracts all terms from the specified query and adds it to the specified document. + * Extracts terms from the provided query. These terms are stored with the percolator query and + * used by the percolate query's candidate query as fields to query by. The candidate query + * holds the terms from the document to be percolated and allows the percolate query to ignore + * percolator queries that we know would otherwise never match. * - * @param query The query to extract terms from - * @param document The document to add the extracted terms to - * @param queryTermsFieldField The field in the document holding the extracted terms - * @param extractionResultField The field contains whether query term extraction was successful, partial or - * failed. (For example the query contained an unsupported query (e.g.
WildcardQuery) - * then query extraction would fail) - * @param fieldType The field type for the query metadata field - */ - public static void extractQueryTerms(Query query, ParseContext.Document document, String queryTermsFieldField, - String extractionResultField, FieldType fieldType) { - Result result; - try { - result = extractQueryTerms(query); - } catch (UnsupportedQueryException e) { - document.add(new Field(extractionResultField, EXTRACTION_FAILED, fieldType)); - return; - } - for (Term term : result.terms) { - BytesRefBuilder builder = new BytesRefBuilder(); - builder.append(new BytesRef(term.field())); - builder.append(FIELD_VALUE_SEPARATOR); - builder.append(term.bytes()); - document.add(new Field(queryTermsFieldField, builder.toBytesRef(), fieldType)); - } - if (result.verified) { - document.add(new Field(extractionResultField, EXTRACTION_COMPLETE, fieldType)); - } else { - document.add(new Field(extractionResultField, EXTRACTION_PARTIAL, fieldType)); - } - } - - /** - * Creates a terms query containing all terms from all fields of the specified index reader. - */ - public static Query createQueryTermsQuery(IndexReader indexReader, String queryMetadataField, - Term... optionalTerms) throws IOException { - Objects.requireNonNull(queryMetadataField); - - List extractedTerms = new ArrayList<>(); - Collections.addAll(extractedTerms, optionalTerms); - - Fields fields = MultiFields.getFields(indexReader); - for (String field : fields) { - Terms terms = fields.terms(field); - if (terms == null) { - continue; - } - - BytesRef fieldBr = new BytesRef(field); - TermsEnum tenum = terms.iterator(); - for (BytesRef term = tenum.next(); term != null; term = tenum.next()) { - BytesRefBuilder builder = new BytesRefBuilder(); - builder.append(fieldBr); - builder.append(FIELD_VALUE_SEPARATOR); - builder.append(term); - extractedTerms.add(new Term(queryMetadataField, builder.toBytesRef())); - } - } - return new TermsQuery(extractedTerms); - } - - /** - * Extracts all query terms from the provided query and adds it to specified list. *

- * From boolean query with no should clauses or phrase queries only the longest term are selected, + When extracting the terms for the specified query, we can also determine if the percolator query is + always going to match. For example, if a percolator query contains just a term query or a disjunction + query, then when the candidate query matches, we know the entire percolator query + matches. This allows the percolate query to skip the expensive memory index verification step that + it would otherwise have to execute (for example, when a percolator query contains a phrase query or a + conjunction query). + + *
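+ For example, analyzing {@code new TermQuery(new Term("field", "fox"))} yields a verified result + holding the single extracted term {@code field:fox}, while analyzing a phrase query on the same + field yields an unverified result holding only its longest term.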

+ The query analyzer doesn't always extract all terms from the specified query. For example, from a + boolean query with no should clauses or from phrase queries only the longest terms are selected, + since those terms are likely to be the rarest. A boolean query's must_not clauses are always ignored. + *
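+ For example, from the phrase query {@code "the lazy dog"} only the longest term, {@code lazy}, + would be extracted, since the longest term is likely to be the rarest.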

- If from part of the query, no query terms can be extracted then term extraction is stopped and - an UnsupportedQueryException is thrown. + Sometimes the query analyzer can't extract terms from a sub query: if that happens, + query analysis is stopped and an UnsupportedQueryException is thrown, so that the caller can mark + this query in such a way that the PercolateQuery always verifies it with the MemoryIndex. */ - static Result extractQueryTerms(Query query) { + public static Result analyze(Query query) { Class queryClass = query.getClass(); if (queryClass.isAnonymousClass()) { // Sometimes queries have anonymous classes; in that case we need the direct super class. @@ -193,14 +127,14 @@ public final class ExtractQueryTermsService { static Function constantScoreQuery() { return query -> { Query wrappedQuery = ((ConstantScoreQuery) query).getQuery(); - return extractQueryTerms(wrappedQuery); + return analyze(wrappedQuery); }; } static Function boostQuery() { return query -> { Query wrappedQuery = ((BoostQuery) query).getQuery(); - return extractQueryTerms(wrappedQuery); + return analyze(wrappedQuery); }; } @@ -275,7 +209,7 @@ public final class ExtractQueryTermsService { Set bestClauses = null; SpanNearQuery spanNearQuery = (SpanNearQuery) query; for (SpanQuery clause : spanNearQuery.getClauses()) { - Result temp = extractQueryTerms(clause); + Result temp = analyze(clause); bestClauses = selectTermListWithTheLongestShortestTerm(temp.terms, bestClauses); } return new Result(false, bestClauses); @@ -287,7 +221,7 @@ public final class ExtractQueryTermsService { Set terms = new HashSet<>(); SpanOrQuery spanOrQuery = (SpanOrQuery) query; for (SpanQuery clause : spanOrQuery.getClauses()) { - terms.addAll(extractQueryTerms(clause).terms); + terms.addAll(analyze(clause).terms); } return new Result(false, terms); }; @@ -295,14 +229,14 @@ static Function spanNotQuery() { return query -> { - Result result = extractQueryTerms(((SpanNotQuery) query).getInclude()); + Result result = analyze(((SpanNotQuery) query).getInclude()); return new Result(false, result.terms); }; } static Function spanFirstQuery() { return query -> { - Result result = extractQueryTerms(((SpanFirstQuery) query).getMatch()); + Result result = analyze(((SpanFirstQuery) query).getMatch()); return new Result(false, result.terms); }; } @@ -339,7 +273,7 @@ Result temp; try { - temp = extractQueryTerms(clause.getQuery()); + temp = analyze(clause.getQuery()); } catch (UnsupportedQueryException e) { uqe = e; continue; @@ -376,11 +310,24 @@ }; } + static Function functionScoreQuery() { + return query -> { + FunctionScoreQuery functionScoreQuery = (FunctionScoreQuery) query; + Result result = analyze(functionScoreQuery.getSubQuery()); + // If min_score is specified we can't guarantee upfront that this percolator query matches, + // so in that case we set verified to false. + // (Without min_score, a match on the extracted terms means the percolator document matches. + // Min score filters out docs, which is different from the functions, which just influence the score.)
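+ // For example, a function_score query wrapping the term query field:fox with a min_score set + // still extracts the term field:fox, but verified is false, so each candidate match is + // re-checked against the MemoryIndex.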
+ boolean verified = functionScoreQuery.getMinScore() == null; + return new Result(verified, result.terms); + }; + } + static Result handleDisjunction(List disjunctions, int minimumShouldMatch, boolean otherClauses) { boolean verified = minimumShouldMatch <= 1 && otherClauses == false; Set terms = new HashSet<>(); for (Query disjunct : disjunctions) { - Result subResult = extractQueryTerms(disjunct); + Result subResult = analyze(disjunct); if (subResult.verified == false) { verified = false; } diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/RestMultiPercolateAction.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/RestMultiPercolateAction.java index 41de2de42d1..3045fa08a09 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/RestMultiPercolateAction.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/RestMultiPercolateAction.java @@ -19,7 +19,7 @@ package org.elasticsearch.percolator; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.client.Client; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -33,13 +33,14 @@ import org.elasticsearch.rest.action.support.RestToXContentListener; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; +@Deprecated public class RestMultiPercolateAction extends BaseRestHandler { private final boolean allowExplicitIndex; @Inject - public RestMultiPercolateAction(Settings settings, RestController controller, Client client) { - super(settings, client); + public RestMultiPercolateAction(Settings settings, RestController controller) { + super(settings); controller.registerHandler(POST, "/_mpercolate", this); controller.registerHandler(POST, "/{index}/_mpercolate", this); controller.registerHandler(POST, "/{index}/{type}/_mpercolate", this); @@ -52,7 +53,7 @@ public class RestMultiPercolateAction extends BaseRestHandler { } @Override - public void handleRequest(final RestRequest restRequest, final RestChannel restChannel, final Client client) throws Exception { + public void handleRequest(final RestRequest restRequest, final RestChannel restChannel, final NodeClient client) throws Exception { MultiPercolateRequest multiPercolateRequest = new MultiPercolateRequest(); multiPercolateRequest.indicesOptions(IndicesOptions.fromRequest(restRequest, multiPercolateRequest.indicesOptions())); multiPercolateRequest.indices(Strings.splitStringByCommaToArray(restRequest.param("index"))); diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/RestPercolateAction.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/RestPercolateAction.java index 6dffd5518c8..1f8c99b2e97 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/RestPercolateAction.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/RestPercolateAction.java @@ -20,7 +20,7 @@ package org.elasticsearch.percolator; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.client.Client; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -35,28 +35,28 @@ import 
org.elasticsearch.rest.action.support.RestToXContentListener; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; +@Deprecated public class RestPercolateAction extends BaseRestHandler { @Inject - public RestPercolateAction(Settings settings, RestController controller, Client client) { - super(settings, client); + public RestPercolateAction(Settings settings, RestController controller) { + super(settings); controller.registerHandler(GET, "/{index}/{type}/_percolate", this); controller.registerHandler(POST, "/{index}/{type}/_percolate", this); - RestPercolateExistingDocHandler existingDocHandler = new RestPercolateExistingDocHandler(settings, controller, client); + RestPercolateExistingDocHandler existingDocHandler = new RestPercolateExistingDocHandler(settings); controller.registerHandler(GET, "/{index}/{type}/{id}/_percolate", existingDocHandler); controller.registerHandler(POST, "/{index}/{type}/{id}/_percolate", existingDocHandler); - RestCountPercolateDocHandler countHandler = new RestCountPercolateDocHandler(settings, controller, client); + RestCountPercolateDocHandler countHandler = new RestCountPercolateDocHandler(settings); controller.registerHandler(GET, "/{index}/{type}/_percolate/count", countHandler); controller.registerHandler(POST, "/{index}/{type}/_percolate/count", countHandler); - RestCountPercolateExistingDocHandler countExistingDocHandler = new RestCountPercolateExistingDocHandler(settings, controller, - client); + RestCountPercolateExistingDocHandler countExistingDocHandler = new RestCountPercolateExistingDocHandler(settings); controller.registerHandler(GET, "/{index}/{type}/{id}/_percolate/count", countExistingDocHandler); controller.registerHandler(POST, "/{index}/{type}/{id}/_percolate/count", countExistingDocHandler); } - void parseDocPercolate(PercolateRequest percolateRequest, RestRequest restRequest, RestChannel restChannel, final Client client) { + void parseDocPercolate(PercolateRequest percolateRequest, RestRequest restRequest, RestChannel restChannel, NodeClient client) { percolateRequest.indices(Strings.splitStringByCommaToArray(restRequest.param("index"))); percolateRequest.documentType(restRequest.param("type")); percolateRequest.routing(restRequest.param("routing")); @@ -68,7 +68,7 @@ public class RestPercolateAction extends BaseRestHandler { } void parseExistingDocPercolate(PercolateRequest percolateRequest, RestRequest restRequest, RestChannel restChannel, - final Client client) { + NodeClient client) { String index = restRequest.param("index"); String type = restRequest.param("type"); percolateRequest.indices(Strings.splitStringByCommaToArray(restRequest.param("percolate_index", index))); @@ -92,24 +92,24 @@ public class RestPercolateAction extends BaseRestHandler { executePercolate(client, percolateRequest, restChannel); } - void executePercolate(final Client client, final PercolateRequest percolateRequest, final RestChannel restChannel) { + void executePercolate(final NodeClient client, final PercolateRequest percolateRequest, final RestChannel restChannel) { client.execute(PercolateAction.INSTANCE, percolateRequest, new RestToXContentListener<>(restChannel)); } @Override - public void handleRequest(RestRequest restRequest, RestChannel restChannel, final Client client) { + public void handleRequest(RestRequest restRequest, RestChannel restChannel, final NodeClient client) { PercolateRequest percolateRequest = new PercolateRequest(); parseDocPercolate(percolateRequest, restRequest, 
restChannel, client); } final class RestCountPercolateDocHandler extends BaseRestHandler { - private RestCountPercolateDocHandler(Settings settings, final RestController controller, Client client) { - super(settings, client); + private RestCountPercolateDocHandler(Settings settings) { + super(settings); } @Override - public void handleRequest(RestRequest restRequest, RestChannel restChannel, final Client client) { + public void handleRequest(RestRequest restRequest, RestChannel restChannel, final NodeClient client) { PercolateRequest percolateRequest = new PercolateRequest(); percolateRequest.onlyCount(true); parseDocPercolate(percolateRequest, restRequest, restChannel, client); @@ -118,12 +118,12 @@ public class RestPercolateAction extends BaseRestHandler { final class RestPercolateExistingDocHandler extends BaseRestHandler { - protected RestPercolateExistingDocHandler(Settings settings, final RestController controller, Client client) { - super(settings, client); + protected RestPercolateExistingDocHandler(Settings settings) { + super(settings); } @Override - public void handleRequest(RestRequest restRequest, RestChannel restChannel, final Client client) { + public void handleRequest(RestRequest restRequest, RestChannel restChannel, final NodeClient client) { PercolateRequest percolateRequest = new PercolateRequest(); parseExistingDocPercolate(percolateRequest, restRequest, restChannel, client); } @@ -131,12 +131,12 @@ public class RestPercolateAction extends BaseRestHandler { final class RestCountPercolateExistingDocHandler extends BaseRestHandler { - protected RestCountPercolateExistingDocHandler(Settings settings, final RestController controller, Client client) { - super(settings, client); + protected RestCountPercolateExistingDocHandler(Settings settings) { + super(settings); } @Override - public void handleRequest(RestRequest restRequest, RestChannel restChannel, final Client client) { + public void handleRequest(RestRequest restRequest, RestChannel restChannel, final NodeClient client) { PercolateRequest percolateRequest = new PercolateRequest(); percolateRequest.onlyCount(true); parseExistingDocPercolate(percolateRequest, restRequest, restChannel, client); diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/TransportMultiPercolateAction.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/TransportMultiPercolateAction.java index 2b9f77ab82a..0bd8b15bfb7 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/TransportMultiPercolateAction.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/TransportMultiPercolateAction.java @@ -49,6 +49,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +@Deprecated public class TransportMultiPercolateAction extends HandledTransportAction { private final Client client; @@ -107,7 +108,7 @@ public class TransportMultiPercolateAction extends HandledTransportAction { private final Client client; @@ -89,7 +90,7 @@ public class TransportPercolateAction extends HandledTransportAction queries; + private PercolateQuery.QueryStore queryStore; + + @Override + protected Collection> getPlugins() { + return Collections.singleton(PercolatorPlugin.class); + } + + @Before + public void init() throws Exception { + directory = newDirectory(); + IndexWriterConfig config = new IndexWriterConfig(new WhitespaceAnalyzer()); + config.setMergePolicy(NoMergePolicy.INSTANCE); + indexWriter = new IndexWriter(directory, config); + + String indexName = "test"; + IndexService 
indexService = createIndex(indexName, Settings.EMPTY); + mapperService = indexService.mapperService(); + + String mapper = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties") + .startObject("int_field").field("type", "integer").endObject() + .startObject("long_field").field("type", "long").endObject() + .startObject("half_float_field").field("type", "half_float").endObject() + .startObject("float_field").field("type", "float").endObject() + .startObject("double_field").field("type", "double").endObject() + .startObject("ip_field").field("type", "ip").endObject() + .startObject("field").field("type", "keyword").endObject() + .endObject().endObject().endObject().string(); + documentMapper = mapperService.merge("type", new CompressedXContent(mapper), MapperService.MergeReason.MAPPING_UPDATE, true); + + String queryField = "query_field"; + String mappingType = "query"; + String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject(mappingType) + .startObject("properties").startObject(queryField).field("type", "percolator").endObject().endObject() + .endObject().endObject().string(); + mapperService.merge(mappingType, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE, true); + fieldMapper = (PercolatorFieldMapper) mapperService.documentMapper(mappingType).mappers().getMapper(queryField); + fieldType = (PercolatorFieldMapper.FieldType) fieldMapper.fieldType(); + + queries = new ArrayList<>(); + queryStore = ctx -> docId -> this.queries.get(docId); + } + + @After + public void deinit() throws Exception { + directoryReader.close(); + directory.close(); + } + + public void testDuel() throws Exception { + List> queryFunctions = new ArrayList<>(); + queryFunctions.add((id) -> new PrefixQuery(new Term("field", id))); + queryFunctions.add((id) -> new WildcardQuery(new Term("field", id + "*"))); + queryFunctions.add((id) -> new CustomQuery(new Term("field", id))); + queryFunctions.add((id) -> new SpanTermQuery(new Term("field", id))); + queryFunctions.add((id) -> new TermQuery(new Term("field", id))); + queryFunctions.add((id) -> { + BooleanQuery.Builder builder = new BooleanQuery.Builder(); + return builder.build(); + }); + queryFunctions.add((id) -> { + BooleanQuery.Builder builder = new BooleanQuery.Builder(); + builder.add(new TermQuery(new Term("field", id)), BooleanClause.Occur.MUST); + if (randomBoolean()) { + builder.add(new MatchNoDocsQuery("no reason"), BooleanClause.Occur.MUST_NOT); + } + if (randomBoolean()) { + builder.add(new CustomQuery(new Term("field", id)), BooleanClause.Occur.MUST); + } + return builder.build(); + }); + queryFunctions.add((id) -> { + BooleanQuery.Builder builder = new BooleanQuery.Builder(); + builder.add(new TermQuery(new Term("field", id)), BooleanClause.Occur.SHOULD); + if (randomBoolean()) { + builder.add(new MatchNoDocsQuery("no reason"), BooleanClause.Occur.MUST_NOT); + } + if (randomBoolean()) { + builder.add(new CustomQuery(new Term("field", id)), BooleanClause.Occur.SHOULD); + } + return builder.build(); + }); + queryFunctions.add((id) -> { + BooleanQuery.Builder builder = new BooleanQuery.Builder(); + builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST); + builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST); + if (randomBoolean()) { + builder.add(new MatchNoDocsQuery("no reason"), BooleanClause.Occur.MUST_NOT); + } + return builder.build(); + }); + queryFunctions.add((id) -> { + BooleanQuery.Builder builder = new BooleanQuery.Builder(); + 
builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD); + builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD); + if (randomBoolean()) { + builder.add(new MatchNoDocsQuery("no reason"), BooleanClause.Occur.MUST_NOT); + } + return builder.build(); + }); + queryFunctions.add((id) -> { + BooleanQuery.Builder builder = new BooleanQuery.Builder(); + builder.setMinimumNumberShouldMatch(randomIntBetween(0, 4)); + builder.add(new TermQuery(new Term("field", id)), BooleanClause.Occur.SHOULD); + builder.add(new CustomQuery(new Term("field", id)), BooleanClause.Occur.SHOULD); + return builder.build(); + }); + queryFunctions.add((id) -> new MatchAllDocsQuery()); + queryFunctions.add((id) -> new MatchNoDocsQuery("no reason at all")); + + int numDocs = randomIntBetween(queryFunctions.size(), queryFunctions.size() * 3); + List documents = new ArrayList<>(); + for (int i = 0; i < numDocs; i++) { + String id = Integer.toString(i); + Query query = queryFunctions.get(i % queryFunctions.size()).apply(id); + addQuery(query, documents); + } + + indexWriter.addDocuments(documents); + indexWriter.close(); + directoryReader = DirectoryReader.open(directory); + IndexSearcher shardSearcher = newSearcher(directoryReader); + // Disable query cache, because ControlQuery cannot be cached... + shardSearcher.setQueryCache(null); + + for (int i = 0; i < numDocs; i++) { + String id = Integer.toString(i); + Iterable doc = Collections.singleton(new StringField("field", id, Field.Store.NO)); + MemoryIndex memoryIndex = MemoryIndex.fromDocument(doc, new WhitespaceAnalyzer()); + duelRun(queryStore, memoryIndex, shardSearcher); + } + + Iterable doc = Collections.singleton(new StringField("field", "value", Field.Store.NO)); + MemoryIndex memoryIndex = MemoryIndex.fromDocument(doc, new WhitespaceAnalyzer()); + duelRun(queryStore, memoryIndex, shardSearcher); + // Empty percolator doc: + memoryIndex = new MemoryIndex(); + duelRun(queryStore, memoryIndex, shardSearcher); + } + + public void testDuelSpecificQueries() throws Exception { + List documents = new ArrayList<>(); + + CommonTermsQuery commonTermsQuery = new CommonTermsQuery(BooleanClause.Occur.SHOULD, BooleanClause.Occur.SHOULD, 128); + commonTermsQuery.add(new Term("field", "quick")); + commonTermsQuery.add(new Term("field", "brown")); + commonTermsQuery.add(new Term("field", "fox")); + addQuery(commonTermsQuery, documents); + + BlendedTermQuery blendedTermQuery = BlendedTermQuery.booleanBlendedQuery(new Term[]{new Term("field", "quick"), + new Term("field", "brown"), new Term("field", "fox")}, false); + addQuery(blendedTermQuery, documents); + + SpanNearQuery spanNearQuery = new SpanNearQuery.Builder("field", true) + .addClause(new SpanTermQuery(new Term("field", "quick"))) + .addClause(new SpanTermQuery(new Term("field", "brown"))) + .addClause(new SpanTermQuery(new Term("field", "fox"))) + .build(); + addQuery(spanNearQuery, documents); + + SpanNearQuery spanNearQuery2 = new SpanNearQuery.Builder("field", true) + .addClause(new SpanTermQuery(new Term("field", "the"))) + .addClause(new SpanTermQuery(new Term("field", "lazy"))) + .addClause(new SpanTermQuery(new Term("field", "doc"))) + .build(); + SpanOrQuery spanOrQuery = new SpanOrQuery( + spanNearQuery, + spanNearQuery2 + ); + addQuery(spanOrQuery, documents); + + SpanNotQuery spanNotQuery = new SpanNotQuery(spanNearQuery, spanNearQuery); + addQuery(spanNotQuery, documents); + + long lowerLong = randomIntBetween(0, 256); + long upperLong = lowerLong + randomIntBetween(0, 32); + 
addQuery(LongPoint.newRangeQuery("long_field", lowerLong, upperLong), documents); + + indexWriter.addDocuments(documents); + indexWriter.close(); + directoryReader = DirectoryReader.open(directory); + IndexSearcher shardSearcher = newSearcher(directoryReader); + // Disable query cache, because ControlQuery cannot be cached... + shardSearcher.setQueryCache(null); + + Document document = new Document(); + document.add(new TextField("field", "the quick brown fox jumps over the lazy dog", Field.Store.NO)); + long randomLong = randomIntBetween((int) lowerLong, (int) upperLong); + document.add(new LongPoint("long_field", randomLong)); + MemoryIndex memoryIndex = MemoryIndex.fromDocument(document, new WhitespaceAnalyzer()); + duelRun(queryStore, memoryIndex, shardSearcher); + } + + private void duelRun(PercolateQuery.QueryStore queryStore, MemoryIndex memoryIndex, IndexSearcher shardSearcher) throws IOException { + boolean requireScore = randomBoolean(); + IndexSearcher percolateSearcher = memoryIndex.createSearcher(); + Query percolateQuery = fieldType.percolateQuery("type", queryStore, new BytesArray("{}"), percolateSearcher); + Query query = requireScore ? percolateQuery : new ConstantScoreQuery(percolateQuery); + TopDocs topDocs = shardSearcher.search(query, 10); + + Query controlQuery = new ControlQuery(memoryIndex, queryStore); + controlQuery = requireScore ? controlQuery : new ConstantScoreQuery(controlQuery); + TopDocs controlTopDocs = shardSearcher.search(controlQuery, 10); + assertThat(topDocs.totalHits, equalTo(controlTopDocs.totalHits)); + assertThat(topDocs.scoreDocs.length, equalTo(controlTopDocs.scoreDocs.length)); + for (int j = 0; j < topDocs.scoreDocs.length; j++) { + assertThat(topDocs.scoreDocs[j].doc, equalTo(controlTopDocs.scoreDocs[j].doc)); + assertThat(topDocs.scoreDocs[j].score, equalTo(controlTopDocs.scoreDocs[j].score)); + if (requireScore) { + Explanation explain1 = shardSearcher.explain(query, topDocs.scoreDocs[j].doc); + Explanation explain2 = shardSearcher.explain(controlQuery, controlTopDocs.scoreDocs[j].doc); + assertThat(explain1.isMatch(), equalTo(explain2.isMatch())); + assertThat(explain1.getValue(), equalTo(explain2.getValue())); + } + } + } + + private void addQuery(Query query, List docs) throws IOException { + ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(Settings.EMPTY, + mapperService.documentMapperParser(), documentMapper, null, null); + fieldMapper.processQuery(query, parseContext); + docs.add(parseContext.doc()); + queries.add(query); + } + + private static final class CustomQuery extends Query { + + private final Term term; + + private CustomQuery(Term term) { + this.term = term; + } + + @Override + public Query rewrite(IndexReader reader) throws IOException { + return new TermQuery(term); + } + + @Override + public String toString(String field) { + return "custom{" + field + "}"; + } + + @Override + public boolean equals(Object obj) { + return sameClassAs(obj); + } + + @Override + public int hashCode() { + return classHash(); + } + } + + private static final class ControlQuery extends Query { + + private final MemoryIndex memoryIndex; + private final PercolateQuery.QueryStore queryStore; + + private ControlQuery(MemoryIndex memoryIndex, PercolateQuery.QueryStore queryStore) { + this.memoryIndex = memoryIndex; + this.queryStore = queryStore; + } + + @Override + public Weight createWeight(IndexSearcher searcher, boolean needsScores) { + return new ConstantScoreWeight(this) { + + float _score; + + @Override + 
public Explanation explain(LeafReaderContext context, int doc) throws IOException { + Scorer scorer = scorer(context); + if (scorer != null) { + int result = scorer.iterator().advance(doc); + if (result == doc) { + return Explanation.match(scorer.score(), "ControlQuery"); + } + } + return Explanation.noMatch("ControlQuery"); + } + + @Override + public String toString() { + return "weight(" + ControlQuery.this + ")"; + } + + @Override + public Scorer scorer(LeafReaderContext context) throws IOException { + DocIdSetIterator allDocs = DocIdSetIterator.all(context.reader().maxDoc()); + PercolateQuery.QueryStore.Leaf leaf = queryStore.getQueries(context); + FilteredDocIdSetIterator memoryIndexIterator = new FilteredDocIdSetIterator(allDocs) { + + @Override + protected boolean match(int doc) { + try { + Query query = leaf.getQuery(doc); + float score = memoryIndex.search(query); + if (score != 0f) { + if (needsScores) { + _score = score; + } + return true; + } else { + return false; + } + } catch (IOException e) { + throw new RuntimeException(e); + } + } + }; + return new FilterScorer(new ConstantScoreScorer(this, score(), memoryIndexIterator)) { + + @Override + public float score() throws IOException { + return _score; + } + }; + } + }; + } + + @Override + public String toString(String field) { + return "control{" + field + "}"; + } + + @Override + public boolean equals(Object obj) { + return sameClassAs(obj); + } + + @Override + public int hashCode() { + return classHash(); + } + + } + +} diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/MultiPercolatorIT.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/MultiPercolatorIT.java index 15d531467d4..0a359376f7b 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/MultiPercolatorIT.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/MultiPercolatorIT.java @@ -53,8 +53,8 @@ import static org.hamcrest.Matchers.nullValue; public class MultiPercolatorIT extends ESIntegTestCase { - private final static String INDEX_NAME = "queries"; - private final static String TYPE_NAME = "query"; + private static final String INDEX_NAME = "queries"; + private static final String TYPE_NAME = "query"; @Override protected Collection> nodePlugins() { diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java index f51e2b66af9..09cb4a10aa1 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java @@ -189,7 +189,7 @@ public class PercolateQueryBuilderTests extends AbstractQueryTestCase queries; - private PercolateQuery.QueryStore queryStore; private DirectoryReader directoryReader; @Before public void init() throws Exception { directory = newDirectory(); - queries = new HashMap<>(); - queryStore = ctx -> docId -> { - try { - String val = ctx.reader().document(docId).get(UidFieldMapper.NAME); - return queries.get(Uid.createUid(val).id()); - } catch (IOException e) { - throw new RuntimeException(e); - } - }; IndexWriterConfig config = new IndexWriterConfig(new WhitespaceAnalyzer()); config.setMergePolicy(NoMergePolicy.INSTANCE); indexWriter = new IndexWriter(directory, config); @@ -121,31 +76,38 @@ public class PercolateQueryTests extends ESTestCase { directory.close(); } - public void 
testVariousQueries() throws Exception { - addPercolatorQuery("1", new TermQuery(new Term("field", "brown"))); - addPercolatorQuery("2", new TermQuery(new Term("field", "monkey"))); - addPercolatorQuery("3", new TermQuery(new Term("field", "fox"))); - BooleanQuery.Builder bq1 = new BooleanQuery.Builder(); - bq1.add(new TermQuery(new Term("field", "fox")), BooleanClause.Occur.SHOULD); - bq1.add(new TermQuery(new Term("field", "monkey")), BooleanClause.Occur.SHOULD); - addPercolatorQuery("4", bq1.build()); - BooleanQuery.Builder bq2 = new BooleanQuery.Builder(); - bq2.add(new TermQuery(new Term("field", "fox")), BooleanClause.Occur.MUST); - bq2.add(new TermQuery(new Term("field", "monkey")), BooleanClause.Occur.MUST); - addPercolatorQuery("5", bq2.build()); - BooleanQuery.Builder bq3 = new BooleanQuery.Builder(); - bq3.add(new TermQuery(new Term("field", "fox")), BooleanClause.Occur.MUST); - bq3.add(new TermQuery(new Term("field", "apes")), BooleanClause.Occur.MUST_NOT); - addPercolatorQuery("6", bq3.build()); - BooleanQuery.Builder bq4 = new BooleanQuery.Builder(); - bq4.add(new TermQuery(new Term("field", "fox")), BooleanClause.Occur.MUST_NOT); - bq4.add(new TermQuery(new Term("field", "apes")), BooleanClause.Occur.MUST); - addPercolatorQuery("7", bq4.build()); - PhraseQuery.Builder pq1 = new PhraseQuery.Builder(); - pq1.add(new Term("field", "lazy")); - pq1.add(new Term("field", "dog")); - addPercolatorQuery("8", pq1.build()); + public void testPercolateQuery() throws Exception { + List> docs = new ArrayList<>(); + List queries = new ArrayList<>(); + PercolateQuery.QueryStore queryStore = ctx -> queries::get; + queries.add(new TermQuery(new Term("field", "fox"))); + docs.add(Collections.singleton(new StringField("select", "a", Field.Store.NO))); + + SpanNearQuery.Builder snp = new SpanNearQuery.Builder("field", true); + snp.addClause(new SpanTermQuery(new Term("field", "jumps"))); + snp.addClause(new SpanTermQuery(new Term("field", "lazy"))); + snp.addClause(new SpanTermQuery(new Term("field", "dog"))); + snp.setSlop(2); + queries.add(snp.build()); + docs.add(Collections.singleton(new StringField("select", "b", Field.Store.NO))); + + PhraseQuery.Builder pq1 = new PhraseQuery.Builder(); + pq1.add(new Term("field", "quick")); + pq1.add(new Term("field", "brown")); + pq1.add(new Term("field", "jumps")); + pq1.setSlop(1); + queries.add(pq1.build()); + docs.add(Collections.singleton(new StringField("select", "b", Field.Store.NO))); + + BooleanQuery.Builder bq1 = new BooleanQuery.Builder(); + bq1.add(new TermQuery(new Term("field", "quick")), BooleanClause.Occur.MUST); + bq1.add(new TermQuery(new Term("field", "brown")), BooleanClause.Occur.MUST); + bq1.add(new TermQuery(new Term("field", "fox")), BooleanClause.Occur.MUST); + queries.add(bq1.build()); + docs.add(Collections.singleton(new StringField("select", "b", Field.Store.NO))); + + indexWriter.addDocuments(docs); indexWriter.close(); directoryReader = DirectoryReader.open(directory); IndexSearcher shardSearcher = newSearcher(directoryReader); @@ -153,26 +115,26 @@ public class PercolateQueryTests extends ESTestCase { MemoryIndex memoryIndex = new MemoryIndex(); memoryIndex.addField("field", "the quick brown fox jumps over the lazy dog", new WhitespaceAnalyzer()); IndexSearcher percolateSearcher = memoryIndex.createSearcher(); - - PercolateQuery.Builder builder = new PercolateQuery.Builder( - "docType", - queryStore, - new BytesArray("{}"), - percolateSearcher - ); - builder.extractQueryTermsQuery(EXTRACTED_TERMS_FIELD_NAME, 
UNKNOWN_QUERY_FIELD_NAME); // no scoring, wrapping it in a constant score query: - Query query = new ConstantScoreQuery(builder.build()); + Query query = new ConstantScoreQuery(new PercolateQuery("type", queryStore, new BytesArray("a"), + new TermQuery(new Term("select", "a")), percolateSearcher, new MatchNoDocsQuery(""))); TopDocs topDocs = shardSearcher.search(query, 10); - assertThat(topDocs.totalHits, equalTo(5)); - assertThat(topDocs.scoreDocs.length, equalTo(5)); + assertThat(topDocs.totalHits, equalTo(1)); + assertThat(topDocs.scoreDocs.length, equalTo(1)); assertThat(topDocs.scoreDocs[0].doc, equalTo(0)); Explanation explanation = shardSearcher.explain(query, 0); assertThat(explanation.isMatch(), is(true)); assertThat(explanation.getValue(), equalTo(topDocs.scoreDocs[0].score)); + query = new ConstantScoreQuery(new PercolateQuery("type", queryStore, new BytesArray("b"), + new TermQuery(new Term("select", "b")), percolateSearcher, new MatchNoDocsQuery(""))); + topDocs = shardSearcher.search(query, 10); + assertThat(topDocs.totalHits, equalTo(3)); + assertThat(topDocs.scoreDocs.length, equalTo(3)); + assertThat(topDocs.scoreDocs[0].doc, equalTo(1)); explanation = shardSearcher.explain(query, 1); - assertThat(explanation.isMatch(), is(false)); + assertThat(explanation.isMatch(), is(true)); + assertThat(explanation.getValue(), equalTo(topDocs.scoreDocs[0].score)); assertThat(topDocs.scoreDocs[1].doc, equalTo(2)); explanation = shardSearcher.explain(query, 2); @@ -180,371 +142,37 @@ public class PercolateQueryTests extends ESTestCase { assertThat(explanation.getValue(), equalTo(topDocs.scoreDocs[1].score)); assertThat(topDocs.scoreDocs[2].doc, equalTo(3)); - explanation = shardSearcher.explain(query, 3); + explanation = shardSearcher.explain(query, 2); assertThat(explanation.isMatch(), is(true)); assertThat(explanation.getValue(), equalTo(topDocs.scoreDocs[2].score)); - explanation = shardSearcher.explain(query, 4); - assertThat(explanation.isMatch(), is(false)); + query = new ConstantScoreQuery(new PercolateQuery("type", queryStore, new BytesArray("c"), + new MatchAllDocsQuery(), percolateSearcher, new MatchAllDocsQuery())); + topDocs = shardSearcher.search(query, 10); + assertThat(topDocs.totalHits, equalTo(4)); - assertThat(topDocs.scoreDocs[3].doc, equalTo(5)); - explanation = shardSearcher.explain(query, 5); - assertThat(explanation.isMatch(), is(true)); - assertThat(explanation.getValue(), equalTo(topDocs.scoreDocs[3].score)); - - explanation = shardSearcher.explain(query, 6); - assertThat(explanation.isMatch(), is(false)); - - assertThat(topDocs.scoreDocs[4].doc, equalTo(7)); - explanation = shardSearcher.explain(query, 7); - assertThat(explanation.isMatch(), is(true)); - assertThat(explanation.getValue(), equalTo(topDocs.scoreDocs[4].score)); - } - - public void testVariousQueries_withScoring() throws Exception { - SpanNearQuery.Builder snp = new SpanNearQuery.Builder("field", true); - snp.addClause(new SpanTermQuery(new Term("field", "jumps"))); - snp.addClause(new SpanTermQuery(new Term("field", "lazy"))); - snp.addClause(new SpanTermQuery(new Term("field", "dog"))); - snp.setSlop(2); - addPercolatorQuery("1", snp.build()); - PhraseQuery.Builder pq1 = new PhraseQuery.Builder(); - pq1.add(new Term("field", "quick")); - pq1.add(new Term("field", "brown")); - pq1.add(new Term("field", "jumps")); - pq1.setSlop(1); - addPercolatorQuery("2", pq1.build()); - BooleanQuery.Builder bq1 = new BooleanQuery.Builder(); - bq1.add(new TermQuery(new Term("field", "quick")), 
BooleanClause.Occur.MUST); - bq1.add(new TermQuery(new Term("field", "brown")), BooleanClause.Occur.MUST); - bq1.add(new TermQuery(new Term("field", "fox")), BooleanClause.Occur.MUST); - addPercolatorQuery("3", bq1.build()); - - indexWriter.close(); - directoryReader = DirectoryReader.open(directory); - IndexSearcher shardSearcher = newSearcher(directoryReader); - - MemoryIndex memoryIndex = new MemoryIndex(); - memoryIndex.addField("field", "the quick brown fox jumps over the lazy dog", new WhitespaceAnalyzer()); - IndexSearcher percolateSearcher = memoryIndex.createSearcher(); - - PercolateQuery.Builder builder = new PercolateQuery.Builder( - "docType", - queryStore, - new BytesArray("{}"), - percolateSearcher - ); - builder.extractQueryTermsQuery(EXTRACTED_TERMS_FIELD_NAME, UNKNOWN_QUERY_FIELD_NAME); - Query query = builder.build(); - TopDocs topDocs = shardSearcher.search(query, 10); + query = new PercolateQuery("type", queryStore, new BytesArray("{}"), new TermQuery(new Term("select", "b")), + percolateSearcher, new MatchNoDocsQuery("")); + topDocs = shardSearcher.search(query, 10); assertThat(topDocs.totalHits, equalTo(3)); - - assertThat(topDocs.scoreDocs[0].doc, equalTo(2)); - Explanation explanation = shardSearcher.explain(query, 2); + assertThat(topDocs.scoreDocs.length, equalTo(3)); + assertThat(topDocs.scoreDocs[0].doc, equalTo(3)); + explanation = shardSearcher.explain(query, 3); assertThat(explanation.isMatch(), is(true)); assertThat(explanation.getValue(), equalTo(topDocs.scoreDocs[0].score)); assertThat(explanation.getDetails(), arrayWithSize(1)); - assertThat(topDocs.scoreDocs[1].doc, equalTo(1)); - explanation = shardSearcher.explain(query, 1); + assertThat(topDocs.scoreDocs[1].doc, equalTo(2)); + explanation = shardSearcher.explain(query, 2); assertThat(explanation.isMatch(), is(true)); assertThat(explanation.getValue(), equalTo(topDocs.scoreDocs[1].score)); assertThat(explanation.getDetails(), arrayWithSize(1)); - assertThat(topDocs.scoreDocs[2].doc, equalTo(0)); - explanation = shardSearcher.explain(query, 0); + assertThat(topDocs.scoreDocs[2].doc, equalTo(1)); + explanation = shardSearcher.explain(query, 1); assertThat(explanation.isMatch(), is(true)); assertThat(explanation.getValue(), equalTo(topDocs.scoreDocs[2].score)); assertThat(explanation.getDetails(), arrayWithSize(1)); } - public void testDuel() throws Exception { - List<Function<String, Query>> queries = new ArrayList<>(); - queries.add((id) -> new PrefixQuery(new Term("field", id))); - queries.add((id) -> new WildcardQuery(new Term("field", id + "*"))); - queries.add((id) -> new CustomQuery(new Term("field", id))); - queries.add((id) -> new SpanTermQuery(new Term("field", id))); - queries.add((id) -> new TermQuery(new Term("field", id))); - queries.add((id) -> { - BooleanQuery.Builder builder = new BooleanQuery.Builder(); - return builder.build(); - }); - queries.add((id) -> { - BooleanQuery.Builder builder = new BooleanQuery.Builder(); - builder.add(new TermQuery(new Term("field", id)), BooleanClause.Occur.MUST); - if (randomBoolean()) { - builder.add(new MatchNoDocsQuery("no reason"), BooleanClause.Occur.MUST_NOT); - } - if (randomBoolean()) { - builder.add(new CustomQuery(new Term("field", id)), BooleanClause.Occur.MUST); - } - return builder.build(); - }); - queries.add((id) -> { - BooleanQuery.Builder builder = new BooleanQuery.Builder(); - builder.add(new TermQuery(new Term("field", id)), BooleanClause.Occur.SHOULD); - if (randomBoolean()) { - builder.add(new MatchNoDocsQuery("no reason"), BooleanClause.Occur.MUST_NOT); - } 
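// Note: the generators in this removed testDuel feed a simple invariant, checked by
// duelRun(...) further down in this removed block: every randomly generated percolator
// query must return exactly the hits (and, when scoring is required, the scores) that
// the brute-force ControlQuery returns. A minimal sketch of that check, with
// buildOptimized/buildControl as illustrative placeholders rather than the module's API:
//
//     TopDocs fast = shardSearcher.search(buildOptimized(memoryIndex), 10);
//     TopDocs slow = shardSearcher.search(buildControl(memoryIndex), 10);
//     assertEquals(slow.totalHits, fast.totalHits);
//     for (int i = 0; i < fast.scoreDocs.length; i++) {
//         assertEquals(slow.scoreDocs[i].doc, fast.scoreDocs[i].doc);
//     }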
- if (randomBoolean()) { - builder.add(new CustomQuery(new Term("field", id)), BooleanClause.Occur.SHOULD); - } - return builder.build(); - }); - queries.add((id) -> { - BooleanQuery.Builder builder = new BooleanQuery.Builder(); - builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST); - builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST); - if (randomBoolean()) { - builder.add(new MatchNoDocsQuery("no reason"), BooleanClause.Occur.MUST_NOT); - } - return builder.build(); - }); - queries.add((id) -> { - BooleanQuery.Builder builder = new BooleanQuery.Builder(); - builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD); - builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD); - if (randomBoolean()) { - builder.add(new MatchNoDocsQuery("no reason"), BooleanClause.Occur.MUST_NOT); - } - return builder.build(); - }); - queries.add((id) -> { - BooleanQuery.Builder builder = new BooleanQuery.Builder(); - builder.setMinimumNumberShouldMatch(randomIntBetween(0, 4)); - builder.add(new TermQuery(new Term("field", id)), BooleanClause.Occur.SHOULD); - builder.add(new CustomQuery(new Term("field", id)), BooleanClause.Occur.SHOULD); - return builder.build(); - }); - queries.add((id) -> new MatchAllDocsQuery()); - queries.add((id) -> new MatchNoDocsQuery("no reason at all")); - - int numDocs = randomIntBetween(queries.size(), queries.size() * 3); - for (int i = 0; i < numDocs; i++) { - String id = Integer.toString(i); - addPercolatorQuery(id, queries.get(i % queries.size()).apply(id)); - } - - indexWriter.close(); - directoryReader = DirectoryReader.open(directory); - IndexSearcher shardSearcher = newSearcher(directoryReader); - // Disable query cache, because ControlQuery cannot be cached... - shardSearcher.setQueryCache(null); - - for (int i = 0; i < numDocs; i++) { - String id = Integer.toString(i); - MemoryIndex memoryIndex = new MemoryIndex(); - memoryIndex.addField("field", id, new WhitespaceAnalyzer()); - duelRun(memoryIndex, shardSearcher); - } - - MemoryIndex memoryIndex = new MemoryIndex(); - memoryIndex.addField("field", "value", new WhitespaceAnalyzer()); - duelRun(memoryIndex, shardSearcher); - // Empty percolator doc: - memoryIndex = new MemoryIndex(); - duelRun(memoryIndex, shardSearcher); - } - - public void testDuelSpecificQueries() throws Exception { - CommonTermsQuery commonTermsQuery = new CommonTermsQuery(BooleanClause.Occur.SHOULD, BooleanClause.Occur.SHOULD, 128); - commonTermsQuery.add(new Term("field", "quick")); - commonTermsQuery.add(new Term("field", "brown")); - commonTermsQuery.add(new Term("field", "fox")); - addPercolatorQuery("_id1", commonTermsQuery); - - BlendedTermQuery blendedTermQuery = BlendedTermQuery.booleanBlendedQuery(new Term[]{new Term("field", "quick"), - new Term("field", "brown"), new Term("field", "fox")}, false); - addPercolatorQuery("_id2", blendedTermQuery); - - SpanNearQuery spanNearQuery = new SpanNearQuery.Builder("field", true) - .addClause(new SpanTermQuery(new Term("field", "quick"))) - .addClause(new SpanTermQuery(new Term("field", "brown"))) - .addClause(new SpanTermQuery(new Term("field", "fox"))) - .build(); - addPercolatorQuery("_id3", spanNearQuery); - - SpanNearQuery spanNearQuery2 = new SpanNearQuery.Builder("field", true) - .addClause(new SpanTermQuery(new Term("field", "the"))) - .addClause(new SpanTermQuery(new Term("field", "lazy"))) - .addClause(new SpanTermQuery(new Term("field", "doc"))) - .build(); - SpanOrQuery spanOrQuery = new SpanOrQuery( - spanNearQuery, - spanNearQuery2 - ); - 
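// These hand-picked queries exercise how term extraction treats multi-clause queries:
// a SpanNearQuery only matches when all clauses match, so the extraction logic may keep
// the terms of just one (the most selective) clause, while a SpanOrQuery matches when
// any clause matches, so the terms of every clause must be kept. Roughly (illustrative,
// mirroring the QueryAnalyzerTests assertions later in this diff):
//
//     analyze(spanOrQuery).terms    // union of the terms of all clauses
//     analyze(spanNearQuery).terms  // terms of a single clause, chosen via
//                                   // selectTermListWithTheLongestShortestTerm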
addPercolatorQuery("_id4", spanOrQuery); - - SpanNotQuery spanNotQuery = new SpanNotQuery(spanNearQuery, spanNearQuery); - addPercolatorQuery("_id5", spanNotQuery); - - indexWriter.close(); - directoryReader = DirectoryReader.open(directory); - IndexSearcher shardSearcher = newSearcher(directoryReader); - // Disable query cache, because ControlQuery cannot be cached... - shardSearcher.setQueryCache(null); - - MemoryIndex memoryIndex = new MemoryIndex(); - memoryIndex.addField("field", "the quick brown fox jumps over the lazy dog", new WhitespaceAnalyzer()); - duelRun(memoryIndex, shardSearcher); - } - - void addPercolatorQuery(String id, Query query, String... extraFields) throws IOException { - queries.put(id, query); - ParseContext.Document document = new ParseContext.Document(); - ExtractQueryTermsService.extractQueryTerms(query, document, EXTRACTED_TERMS_FIELD_NAME, UNKNOWN_QUERY_FIELD_NAME, - EXTRACTED_TERMS_FIELD_TYPE); - document.add(new StoredField(UidFieldMapper.NAME, Uid.createUid(MapperService.PERCOLATOR_LEGACY_TYPE_NAME, id))); - assert extraFields.length % 2 == 0; - for (int i = 0; i < extraFields.length; i++) { - document.add(new StringField(extraFields[i], extraFields[++i], Field.Store.NO)); - } - indexWriter.addDocument(document); - } - - private void duelRun(MemoryIndex memoryIndex, IndexSearcher shardSearcher) throws IOException { - boolean requireScore = randomBoolean(); - IndexSearcher percolateSearcher = memoryIndex.createSearcher(); - PercolateQuery.Builder builder = new PercolateQuery.Builder( - "docType", - queryStore, - new BytesArray("{}"), - percolateSearcher - ); - // enables the optimization that prevents queries from being evaluated that don't match - builder.extractQueryTermsQuery(EXTRACTED_TERMS_FIELD_NAME, UNKNOWN_QUERY_FIELD_NAME); - Query query = requireScore ? builder.build() : new ConstantScoreQuery(builder.build()); - TopDocs topDocs = shardSearcher.search(query, 10); - - Query controlQuery = new ControlQuery(memoryIndex, queryStore); - controlQuery = requireScore ? 
controlQuery : new ConstantScoreQuery(controlQuery); - TopDocs controlTopDocs = shardSearcher.search(controlQuery, 10); - assertThat(topDocs.totalHits, equalTo(controlTopDocs.totalHits)); - assertThat(topDocs.scoreDocs.length, equalTo(controlTopDocs.scoreDocs.length)); - for (int j = 0; j < topDocs.scoreDocs.length; j++) { - assertThat(topDocs.scoreDocs[j].doc, equalTo(controlTopDocs.scoreDocs[j].doc)); - assertThat(topDocs.scoreDocs[j].score, equalTo(controlTopDocs.scoreDocs[j].score)); - if (requireScore) { - Explanation explain1 = shardSearcher.explain(query, topDocs.scoreDocs[j].doc); - Explanation explain2 = shardSearcher.explain(controlQuery, controlTopDocs.scoreDocs[j].doc); - assertThat(explain1.isMatch(), equalTo(explain2.isMatch())); - assertThat(explain1.getValue(), equalTo(explain2.getValue())); - } - } - } - - private final static class CustomQuery extends Query { - - private final Term term; - - private CustomQuery(Term term) { - this.term = term; - } - - @Override - public Query rewrite(IndexReader reader) throws IOException { - return new TermQuery(term); - } - - @Override - public String toString(String field) { - return "custom{" + field + "}"; - } - - @Override - public boolean equals(Object obj) { - return sameClassAs(obj); - } - - @Override - public int hashCode() { - return classHash(); - } - } - - private final static class ControlQuery extends Query { - - private final MemoryIndex memoryIndex; - private final PercolateQuery.QueryStore queryStore; - - private ControlQuery(MemoryIndex memoryIndex, PercolateQuery.QueryStore queryStore) { - this.memoryIndex = memoryIndex; - this.queryStore = queryStore; - } - - @Override - public Weight createWeight(IndexSearcher searcher, boolean needsScores) { - return new ConstantScoreWeight(this) { - - float _score; - - @Override - public Explanation explain(LeafReaderContext context, int doc) throws IOException { - Scorer scorer = scorer(context); - if (scorer != null) { - int result = scorer.iterator().advance(doc); - if (result == doc) { - return Explanation.match(scorer.score(), "ControlQuery"); - } - } - return Explanation.noMatch("ControlQuery"); - } - - @Override - public String toString() { - return "weight(" + ControlQuery.this + ")"; - } - - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - DocIdSetIterator allDocs = DocIdSetIterator.all(context.reader().maxDoc()); - PercolateQuery.QueryStore.Leaf leaf = queryStore.getQueries(context); - FilteredDocIdSetIterator memoryIndexIterator = new FilteredDocIdSetIterator(allDocs) { - - @Override - protected boolean match(int doc) { - try { - Query query = leaf.getQuery(doc); - float score = memoryIndex.search(query); - if (score != 0f) { - if (needsScores) { - _score = score; - } - return true; - } else { - return false; - } - } catch (IOException e) { - throw new RuntimeException(e); - } - } - }; - return new FilterScorer(new ConstantScoreScorer(this, score(), memoryIndexIterator)) { - - @Override - public float score() throws IOException { - return _score; - } - }; - } - }; - } - - @Override - public String toString(String field) { - return "control{" + field + "}"; - } - - @Override - public boolean equals(Object obj) { - return sameClassAs(obj); - } - - @Override - public int hashCode() { - return classHash(); - } - - } - } diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorAggregationsIT.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorAggregationsIT.java index 
75d4f408774..f4a436e7c40 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorAggregationsIT.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorAggregationsIT.java @@ -50,8 +50,8 @@ import static org.hamcrest.Matchers.notNullValue; public class PercolatorAggregationsIT extends ESIntegTestCase { - private final static String INDEX_NAME = "queries"; - private final static String TYPE_NAME = "query"; + private static final String INDEX_NAME = "queries"; + private static final String TYPE_NAME = "query"; @Override protected Collection<Class<? extends Plugin>> nodePlugins() { diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorBackwardsCompatibilityTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorBackwardsCompatibilityTests.java index 701fd1b660f..7a51d8a7ab2 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorBackwardsCompatibilityTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorBackwardsCompatibilityTests.java @@ -48,7 +48,7 @@ import static org.hamcrest.Matchers.notNullValue; // Can't run as IT as the test cluster is immutable and this test adds nodes during the test public class PercolatorBackwardsCompatibilityTests extends ESIntegTestCase { - private final static String INDEX_NAME = "percolator_index"; + private static final String INDEX_NAME = "percolator_index"; @Override protected Collection<Class<? extends Plugin>> nodePlugins() { diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java index d6221184b6e..57dda2f55cd 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java @@ -19,26 +19,51 @@ package org.elasticsearch.percolator; +import org.apache.lucene.analysis.core.WhitespaceAnalyzer; +import org.apache.lucene.document.LongPoint; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexableField; +import org.apache.lucene.index.PrefixCodedTerms; +import org.apache.lucene.index.Term; +import org.apache.lucene.index.memory.MemoryIndex; +import org.apache.lucene.queries.TermsQuery; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.PhraseQuery; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.BoostingQueryBuilder; +import org.elasticsearch.index.query.ConstantScoreQueryBuilder; +import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import 
org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryShardException; +import org.elasticsearch.index.query.RangeQueryBuilder; +import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; +import org.elasticsearch.index.query.functionscore.RandomScoreFunctionBuilder; import org.elasticsearch.indices.TermsLookup; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.junit.Before; import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.List; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; @@ -49,8 +74,9 @@ import static org.elasticsearch.index.query.QueryBuilders.rangeQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.index.query.QueryBuilders.termsLookupQuery; import static org.elasticsearch.index.query.QueryBuilders.wildcardQuery; -import static org.elasticsearch.percolator.ExtractQueryTermsService.EXTRACTION_COMPLETE; -import static org.elasticsearch.percolator.ExtractQueryTermsService.EXTRACTION_FAILED; +import static org.elasticsearch.percolator.PercolatorFieldMapper.EXTRACTION_COMPLETE; +import static org.elasticsearch.percolator.PercolatorFieldMapper.EXTRACTION_FAILED; +import static org.elasticsearch.percolator.PercolatorFieldMapper.EXTRACTION_PARTIAL; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -61,7 +87,7 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { private String fieldName; private IndexService indexService; private MapperService mapperService; - private PercolatorFieldMapper.PercolatorFieldType fieldType; + private PercolatorFieldMapper.FieldType fieldType; @Override protected Collection<Class<? extends Plugin>> getPlugins() { @@ -77,6 +103,10 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { .startObject("_field_names").field("enabled", false).endObject() // makes testing easier .startObject("properties") .startObject("field").field("type", "text").endObject() + .startObject("field1").field("type", "text").endObject() + .startObject("field2").field("type", "text").endObject() + .startObject("_field3").field("type", "text").endObject() + .startObject("field4").field("type", "text").endObject() .startObject("number_field").field("type", "long").endObject() .startObject("date_field").field("type", "date").endObject() .endObject().endObject().endObject().string(); @@ -90,7 +120,101 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { .startObject("properties").startObject(fieldName).field("type", "percolator").endObject().endObject() .endObject().endObject().string(); mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE, true); - fieldType = (PercolatorFieldMapper.PercolatorFieldType) mapperService.fullName(fieldName); + fieldType = (PercolatorFieldMapper.FieldType) mapperService.fullName(fieldName); + } + + public void testExtractTerms() throws Exception { + addQueryMapping(); + BooleanQuery.Builder bq = new BooleanQuery.Builder(); + TermQuery termQuery1 = new TermQuery(new Term("field", "term1")); + bq.add(termQuery1, BooleanClause.Occur.SHOULD); + TermQuery termQuery2 = new TermQuery(new Term("field", 
"term2")); + bq.add(termQuery2, BooleanClause.Occur.SHOULD); + + DocumentMapper documentMapper = mapperService.documentMapper(typeName); + PercolatorFieldMapper fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper(fieldName); + ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(Settings.EMPTY, + mapperService.documentMapperParser(), documentMapper, null, null); + fieldMapper.processQuery(bq.build(), parseContext); + ParseContext.Document document = parseContext.doc(); + + PercolatorFieldMapper.FieldType fieldType = (PercolatorFieldMapper.FieldType) fieldMapper.fieldType(); + assertThat(document.getField(fieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_COMPLETE)); + List fields = new ArrayList<>(Arrays.asList(document.getFields(fieldType.queryTermsField.name()))); + Collections.sort(fields, (field1, field2) -> field1.binaryValue().compareTo(field2.binaryValue())); + assertThat(fields.size(), equalTo(2)); + assertThat(fields.get(0).binaryValue().utf8ToString(), equalTo("field\u0000term1")); + assertThat(fields.get(1).binaryValue().utf8ToString(), equalTo("field\u0000term2")); + } + + public void testExtractTermsAndRanges_failed() throws Exception { + addQueryMapping(); + TermRangeQuery query = new TermRangeQuery("field1", new BytesRef("a"), new BytesRef("z"), true, true); + DocumentMapper documentMapper = mapperService.documentMapper(typeName); + PercolatorFieldMapper fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper(fieldName); + ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(Settings.EMPTY, + mapperService.documentMapperParser(), documentMapper, null, null); + fieldMapper.processQuery(query, parseContext); + ParseContext.Document document = parseContext.doc(); + + PercolatorFieldMapper.FieldType fieldType = (PercolatorFieldMapper.FieldType) fieldMapper.fieldType(); + assertThat(document.getFields().size(), equalTo(1)); + assertThat(document.getField(fieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_FAILED)); + } + + public void testExtractTermsAndRanges_partial() throws Exception { + addQueryMapping(); + PhraseQuery phraseQuery = new PhraseQuery("field", "term"); + DocumentMapper documentMapper = mapperService.documentMapper(typeName); + PercolatorFieldMapper fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper(fieldName); + ParseContext.InternalParseContext parseContext = new ParseContext.InternalParseContext(Settings.EMPTY, + mapperService.documentMapperParser(), documentMapper, null, null); + fieldMapper.processQuery(phraseQuery, parseContext); + ParseContext.Document document = parseContext.doc(); + + PercolatorFieldMapper.FieldType fieldType = (PercolatorFieldMapper.FieldType) fieldMapper.fieldType(); + assertThat(document.getFields().size(), equalTo(2)); + assertThat(document.getFields().get(0).binaryValue().utf8ToString(), equalTo("field\u0000term")); + assertThat(document.getField(fieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_PARTIAL)); + } + + public void testCreateCandidateQuery() throws Exception { + addQueryMapping(); + + MemoryIndex memoryIndex = new MemoryIndex(false); + memoryIndex.addField("field1", "the quick brown fox jumps over the lazy dog", new WhitespaceAnalyzer()); + memoryIndex.addField("field2", "some more text", new WhitespaceAnalyzer()); + memoryIndex.addField("_field3", "unhide me", new WhitespaceAnalyzer()); + memoryIndex.addField("field4", "123", 
new WhitespaceAnalyzer()); + memoryIndex.addField(new LongPoint("number_field", 10L), new WhitespaceAnalyzer()); + + IndexReader indexReader = memoryIndex.createSearcher().getIndexReader(); + + TermsQuery termsQuery = (TermsQuery) fieldType.createCandidateQuery(indexReader); + + PrefixCodedTerms terms = termsQuery.getTermData(); + assertThat(terms.size(), equalTo(15L)); + PrefixCodedTerms.TermIterator termIterator = terms.iterator(); + assertTermIterator(termIterator, "_field3\u0000me", fieldType.queryTermsField.name()); + assertTermIterator(termIterator, "_field3\u0000unhide", fieldType.queryTermsField.name()); + assertTermIterator(termIterator, "field1\u0000brown", fieldType.queryTermsField.name()); + assertTermIterator(termIterator, "field1\u0000dog", fieldType.queryTermsField.name()); + assertTermIterator(termIterator, "field1\u0000fox", fieldType.queryTermsField.name()); + assertTermIterator(termIterator, "field1\u0000jumps", fieldType.queryTermsField.name()); + assertTermIterator(termIterator, "field1\u0000lazy", fieldType.queryTermsField.name()); + assertTermIterator(termIterator, "field1\u0000over", fieldType.queryTermsField.name()); + assertTermIterator(termIterator, "field1\u0000quick", fieldType.queryTermsField.name()); + assertTermIterator(termIterator, "field1\u0000the", fieldType.queryTermsField.name()); + assertTermIterator(termIterator, "field2\u0000more", fieldType.queryTermsField.name()); + assertTermIterator(termIterator, "field2\u0000some", fieldType.queryTermsField.name()); + assertTermIterator(termIterator, "field2\u0000text", fieldType.queryTermsField.name()); + assertTermIterator(termIterator, "field4\u0000123", fieldType.queryTermsField.name()); + assertTermIterator(termIterator, EXTRACTION_FAILED, fieldType.extractionResultField.name()); + } + + private void assertTermIterator(PrefixCodedTerms.TermIterator termIterator, String expectedValue, String expectedField) { + assertThat(termIterator.next().utf8ToString(), equalTo(expectedValue)); + assertThat(termIterator.field(), equalTo(expectedField)); } public void testPercolatorFieldMapper() throws Exception { @@ -100,12 +224,13 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { .field(fieldName, queryBuilder) .endObject().bytes()); - assertThat(doc.rootDoc().getFields(fieldType.getExtractedTermsField()).length, equalTo(1)); - assertThat(doc.rootDoc().getFields(fieldType.getExtractedTermsField())[0].binaryValue().utf8ToString(), equalTo("field\0value")); - assertThat(doc.rootDoc().getFields(fieldType.getQueryBuilderFieldName()).length, equalTo(1)); - assertThat(doc.rootDoc().getFields(fieldType.getExtractionResultFieldName()).length, equalTo(1)); - assertThat(doc.rootDoc().getFields(fieldType.getExtractionResultFieldName())[0].stringValue(), equalTo(EXTRACTION_COMPLETE)); - BytesRef qbSource = doc.rootDoc().getFields(fieldType.getQueryBuilderFieldName())[0].binaryValue(); + assertThat(doc.rootDoc().getFields(fieldType.queryTermsField.name()).length, equalTo(1)); + assertThat(doc.rootDoc().getFields(fieldType.queryTermsField.name())[0].binaryValue().utf8ToString(), equalTo("field\0value")); + assertThat(doc.rootDoc().getFields(fieldType.queryBuilderField.name()).length, equalTo(1)); + assertThat(doc.rootDoc().getFields(fieldType.extractionResultField.name()).length, equalTo(1)); + assertThat(doc.rootDoc().getFields(fieldType.extractionResultField.name())[0].stringValue(), + equalTo(EXTRACTION_COMPLETE)); + BytesRef qbSource = 
doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue(); assertQueryBuilder(qbSource, queryBuilder); // add a query from which we don't extract terms @@ -113,11 +238,12 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { doc = mapperService.documentMapper(typeName).parse("test", typeName, "1", XContentFactory.jsonBuilder().startObject() .field(fieldName, queryBuilder) .endObject().bytes()); - assertThat(doc.rootDoc().getFields(fieldType.getExtractionResultFieldName()).length, equalTo(1)); - assertThat(doc.rootDoc().getFields(fieldType.getExtractionResultFieldName())[0].stringValue(), equalTo(EXTRACTION_FAILED)); - assertThat(doc.rootDoc().getFields(fieldType.getExtractedTermsField()).length, equalTo(0)); - assertThat(doc.rootDoc().getFields(fieldType.getQueryBuilderFieldName()).length, equalTo(1)); - qbSource = doc.rootDoc().getFields(fieldType.getQueryBuilderFieldName())[0].binaryValue(); + assertThat(doc.rootDoc().getFields(fieldType.extractionResultField.name()).length, equalTo(1)); + assertThat(doc.rootDoc().getFields(fieldType.extractionResultField.name())[0].stringValue(), + equalTo(EXTRACTION_FAILED)); + assertThat(doc.rootDoc().getFields(fieldType.queryTermsField.name()).length, equalTo(0)); + assertThat(doc.rootDoc().getFields(fieldType.queryBuilderField.name()).length, equalTo(1)); + qbSource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue(); assertQueryBuilder(qbSource, queryBuilder); } @@ -136,7 +262,7 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { XContentFactory.jsonBuilder().startObject() .field(fieldName, query) .endObject().bytes()); - BytesRef qbSource = doc.rootDoc().getFields(fieldType.getQueryBuilderFieldName())[0].binaryValue(); + BytesRef qbSource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue(); assertQueryBuilder(qbSource, query); } } @@ -148,7 +274,7 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { ParsedDocument doc = mapperService.documentMapper(typeName).parse("test", typeName, "1", XContentFactory.jsonBuilder().startObject() .field(fieldName, queryBuilder) .endObject().bytes()); - BytesRef qbSource = doc.rootDoc().getFields(fieldType.getQueryBuilderFieldName())[0].binaryValue(); + BytesRef qbSource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue(); assertQueryBuilder(qbSource, queryBuilder.rewrite(indexService.newQueryShardContext())); } @@ -169,7 +295,7 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { addQueryMapping(); ParsedDocument doc = mapperService.documentMapper(typeName).parse("test", typeName, "1", XContentFactory.jsonBuilder().startObject() .endObject().bytes()); - assertThat(doc.rootDoc().getFields(fieldType.getQueryBuilderFieldName()).length, equalTo(0)); + assertThat(doc.rootDoc().getFields(fieldType.queryBuilderField.name()).length, equalTo(0)); try { mapperService.documentMapper(typeName).parse("test", typeName, "1", XContentFactory.jsonBuilder().startObject() @@ -275,6 +401,53 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { assertThat(e.getCause().getMessage(), equalTo("a document can only contain one percolator query")); } + public void testRangeQueryWithNowRangeIsForbidden() throws Exception { + addQueryMapping(); + MapperParsingException e = expectThrows(MapperParsingException.class, () -> { + mapperService.documentMapper(typeName).parse("test", typeName, "1", + jsonBuilder().startObject() + 
.field(fieldName, rangeQuery("date_field").from("2016-01-01||/D").to("now")) + .endObject().bytes()); + } + ); + assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); + e = expectThrows(MapperParsingException.class, () -> { + mapperService.documentMapper(typeName).parse("test", typeName, "1", + jsonBuilder().startObject() + .field(fieldName, rangeQuery("date_field").from("2016-01-01||/D").to("now/D")) + .endObject().bytes()); + } + ); + assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); + e = expectThrows(MapperParsingException.class, () -> { + mapperService.documentMapper(typeName).parse("test", typeName, "1", + jsonBuilder().startObject() + .field(fieldName, rangeQuery("date_field").from("now-1d").to("now")) + .endObject().bytes()); + } + ); + assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); + } + + public void testVerifyRangeQueries() { + RangeQueryBuilder rangeQuery1 = new RangeQueryBuilder("field").from("2016-01-01||/D").to("2017-01-01||/D"); + RangeQueryBuilder rangeQuery2 = new RangeQueryBuilder("field").from("2016-01-01||/D").to("now"); + PercolatorFieldMapper.verifyRangeQueries(rangeQuery1); + expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyRangeQueries(rangeQuery2)); + PercolatorFieldMapper.verifyRangeQueries(new BoolQueryBuilder().must(rangeQuery1)); + expectThrows(IllegalArgumentException.class, () -> + PercolatorFieldMapper.verifyRangeQueries(new BoolQueryBuilder().must(rangeQuery2))); + PercolatorFieldMapper.verifyRangeQueries(new ConstantScoreQueryBuilder((rangeQuery1))); + expectThrows(IllegalArgumentException.class, () -> + PercolatorFieldMapper.verifyRangeQueries(new ConstantScoreQueryBuilder(rangeQuery2))); + PercolatorFieldMapper.verifyRangeQueries(new BoostingQueryBuilder(rangeQuery1, new MatchAllQueryBuilder())); + expectThrows(IllegalArgumentException.class, () -> + PercolatorFieldMapper.verifyRangeQueries(new BoostingQueryBuilder(rangeQuery2, new MatchAllQueryBuilder()))); + PercolatorFieldMapper.verifyRangeQueries(new FunctionScoreQueryBuilder(rangeQuery1, new RandomScoreFunctionBuilder())); + expectThrows(IllegalArgumentException.class, () -> + PercolatorFieldMapper.verifyRangeQueries(new FunctionScoreQueryBuilder(rangeQuery2, new RandomScoreFunctionBuilder()))); + } + private void assertQueryBuilder(BytesRef actual, QueryBuilder expected) throws IOException { XContentParser sourceParser = PercolatorFieldMapper.QUERY_BUILDER_CONTENT_TYPE.xContent() .createParser(actual.bytes, actual.offset, actual.length); diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhaseTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhaseTests.java index ec4107fc2ed..55a25768187 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhaseTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhaseTests.java @@ -22,18 +22,22 @@ import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.DisjunctionMaxQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; +import 
org.elasticsearch.common.lucene.search.function.RandomScoreFunction; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.search.Highlighters; import org.elasticsearch.search.highlight.SearchContextHighlight; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.ESTestCase; import org.mockito.Mockito; +import java.util.Arrays; import java.util.Collections; +import static java.util.Collections.emptyMap; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.sameInstance; @@ -41,12 +45,11 @@ import static org.hamcrest.Matchers.sameInstance; public class PercolatorHighlightSubFetchPhaseTests extends ESTestCase { public void testHitsExecutionNeeded() { - PercolateQuery percolateQuery = new PercolateQuery.Builder("", ctx -> null, new BytesArray("{}"), - Mockito.mock(IndexSearcher.class)) - .build(); - + PercolateQuery percolateQuery = new PercolateQuery( + "", ctx -> null, new BytesArray("{}"), new MatchAllDocsQuery(), Mockito.mock(IndexSearcher.class), new MatchAllDocsQuery() + ); PercolatorHighlightSubFetchPhase subFetchPhase = new PercolatorHighlightSubFetchPhase(Settings.EMPTY, - new Highlighters(Settings.EMPTY)); + emptyMap()); SearchContext searchContext = Mockito.mock(SearchContext.class); Mockito.when(searchContext.highlight()).thenReturn(new SearchContextHighlight(Collections.emptyList())); Mockito.when(searchContext.query()).thenReturn(new MatchAllDocsQuery()); @@ -57,10 +60,9 @@ public class PercolatorHighlightSubFetchPhaseTests extends ESTestCase { } public void testLocatePercolatorQuery() { - PercolateQuery percolateQuery = new PercolateQuery.Builder("", ctx -> null, new BytesArray("{}"), - Mockito.mock(IndexSearcher.class)) - .build(); - + PercolateQuery percolateQuery = new PercolateQuery( + "", ctx -> null, new BytesArray("{}"), new MatchAllDocsQuery(), Mockito.mock(IndexSearcher.class), new MatchAllDocsQuery() + ); assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(new MatchAllDocsQuery()), nullValue()); BooleanQuery.Builder bq = new BooleanQuery.Builder(); bq.add(new MatchAllDocsQuery(), BooleanClause.Occur.FILTER); @@ -77,6 +79,16 @@ public class PercolatorHighlightSubFetchPhaseTests extends ESTestCase { assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(boostQuery), nullValue()); boostQuery = new BoostQuery(percolateQuery, 1f); assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(boostQuery), sameInstance(percolateQuery)); + + FunctionScoreQuery functionScoreQuery = new FunctionScoreQuery(new MatchAllDocsQuery(), new RandomScoreFunction()); + assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(functionScoreQuery), nullValue()); + functionScoreQuery = new FunctionScoreQuery(percolateQuery, new RandomScoreFunction()); + assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(functionScoreQuery), sameInstance(percolateQuery)); + + DisjunctionMaxQuery disjunctionMaxQuery = new DisjunctionMaxQuery(Arrays.asList(new MatchAllDocsQuery()), 1f); + assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(disjunctionMaxQuery), nullValue()); + disjunctionMaxQuery = new DisjunctionMaxQuery(Arrays.asList(percolateQuery, new MatchAllDocsQuery()), 1f); + assertThat(PercolatorHighlightSubFetchPhase.locatePercolatorQuery(disjunctionMaxQuery), sameInstance(percolateQuery)); } } diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorIT.java 
b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorIT.java index ff9bf6634cd..bdfa49016e9 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorIT.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorIT.java @@ -91,8 +91,8 @@ import static org.hamcrest.Matchers.nullValue; public class PercolatorIT extends ESIntegTestCase { - private final static String INDEX_NAME = "queries"; - private final static String TYPE_NAME = "query"; + private static final String INDEX_NAME = "queries"; + private static final String TYPE_NAME = "query"; @Override protected Collection> nodePlugins() { @@ -1553,31 +1553,6 @@ public class PercolatorIT extends ESIntegTestCase { } } - public void testPercolatorQueryWithNowRange() throws Exception { - client().admin().indices().prepareCreate(INDEX_NAME) - .addMapping("my-type", "timestamp", "type=date,format=epoch_millis") - .addMapping(TYPE_NAME, "query", "type=percolator") - .get(); - ensureGreen(); - - client().prepareIndex(INDEX_NAME, TYPE_NAME, "1") - .setSource(jsonBuilder().startObject().field("query", rangeQuery("timestamp").from("now-1d").to("now")).endObject()) - .get(); - client().prepareIndex(INDEX_NAME, TYPE_NAME, "2") - .setSource(jsonBuilder().startObject().field("query", constantScoreQuery(rangeQuery("timestamp").from("now-1d").to("now"))).endObject()) - .get(); - refresh(); - - logger.info("--> Percolate doc with field1=b"); - PercolateResponse response = preparePercolate(client()) - .setIndices(INDEX_NAME).setDocumentType("my-type") - .setPercolateDoc(docBuilder().setDoc("timestamp", System.currentTimeMillis())) - .get(); - assertMatchCount(response, 2L); - assertThat(response.getMatches(), arrayWithSize(2)); - assertThat(convertFromTextArray(response.getMatches(), INDEX_NAME), arrayContainingInAnyOrder("1", "2")); - } - void initNestedIndexAndPercolation() throws IOException { XContentBuilder mapping = XContentFactory.jsonBuilder(); mapping.startObject().startObject("properties").startObject("companyname").field("type", "text").endObject() diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java index 5125a7ea5cc..3cc60d75cf2 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java @@ -44,6 +44,7 @@ import static org.elasticsearch.index.query.QueryBuilders.commonTermsQuery; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.matchQuery; import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery; +import static org.elasticsearch.index.query.QueryBuilders.rangeQuery; import static org.elasticsearch.index.query.QueryBuilders.spanNearQuery; import static org.elasticsearch.index.query.QueryBuilders.spanNotQuery; import static org.elasticsearch.index.query.QueryBuilders.spanTermQuery; @@ -109,6 +110,102 @@ public class PercolatorQuerySearchIT extends ESSingleNodeTestCase { assertThat(response.getHits().getAt(2).getId(), equalTo("3")); } + public void testPercolatorRangeQueries() throws Exception { + createIndex("test", client().admin().indices().prepareCreate("test") + .addMapping("type", "field1", "type=long", "field2", "type=double", "field3", "type=ip") + .addMapping("queries", "query", 
"type=percolator") + ); + + client().prepareIndex("test", "queries", "1") + .setSource(jsonBuilder().startObject().field("query", rangeQuery("field1").from(10).to(12)).endObject()) + .get(); + client().prepareIndex("test", "queries", "2") + .setSource(jsonBuilder().startObject().field("query", rangeQuery("field1").from(20).to(22)).endObject()) + .get(); + client().prepareIndex("test", "queries", "3") + .setSource(jsonBuilder().startObject().field("query", boolQuery() + .must(rangeQuery("field1").from(10).to(12)) + .must(rangeQuery("field1").from(12).to(14)) + ).endObject()).get(); + client().admin().indices().prepareRefresh().get(); + client().prepareIndex("test", "queries", "4") + .setSource(jsonBuilder().startObject().field("query", rangeQuery("field2").from(10).to(12)).endObject()) + .get(); + client().prepareIndex("test", "queries", "5") + .setSource(jsonBuilder().startObject().field("query", rangeQuery("field2").from(20).to(22)).endObject()) + .get(); + client().prepareIndex("test", "queries", "6") + .setSource(jsonBuilder().startObject().field("query", boolQuery() + .must(rangeQuery("field2").from(10).to(12)) + .must(rangeQuery("field2").from(12).to(14)) + ).endObject()).get(); + client().admin().indices().prepareRefresh().get(); + client().prepareIndex("test", "queries", "7") + .setSource(jsonBuilder().startObject() + .field("query", rangeQuery("field3").from("192.168.1.0").to("192.168.1.5")) + .endObject()) + .get(); + client().prepareIndex("test", "queries", "8") + .setSource(jsonBuilder().startObject() + .field("query", rangeQuery("field3").from("192.168.1.20").to("192.168.1.30")) + .endObject()) + .get(); + client().prepareIndex("test", "queries", "9") + .setSource(jsonBuilder().startObject().field("query", boolQuery() + .must(rangeQuery("field3").from("192.168.1.0").to("192.168.1.5")) + .must(rangeQuery("field3").from("192.168.1.5").to("192.168.1.10")) + ).endObject()).get(); + client().admin().indices().prepareRefresh().get(); + + // Test long range: + BytesReference source = jsonBuilder().startObject().field("field1", 12).endObject().bytes(); + SearchResponse response = client().prepareSearch() + .setQuery(new PercolateQueryBuilder("query", "type", source)) + .get(); + assertHitCount(response, 2); + assertThat(response.getHits().getAt(0).getId(), equalTo("3")); + assertThat(response.getHits().getAt(1).getId(), equalTo("1")); + + source = jsonBuilder().startObject().field("field1", 11).endObject().bytes(); + response = client().prepareSearch() + .setQuery(new PercolateQueryBuilder("query", "type", source)) + .get(); + assertHitCount(response, 1); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + + // Test double range: + source = jsonBuilder().startObject().field("field2", 12).endObject().bytes(); + response = client().prepareSearch() + .setQuery(new PercolateQueryBuilder("query", "type", source)) + .get(); + assertHitCount(response, 2); + assertThat(response.getHits().getAt(0).getId(), equalTo("6")); + assertThat(response.getHits().getAt(1).getId(), equalTo("4")); + + source = jsonBuilder().startObject().field("field2", 11).endObject().bytes(); + response = client().prepareSearch() + .setQuery(new PercolateQueryBuilder("query", "type", source)) + .get(); + assertHitCount(response, 1); + assertThat(response.getHits().getAt(0).getId(), equalTo("4")); + + // Test IP range: + source = jsonBuilder().startObject().field("field3", "192.168.1.5").endObject().bytes(); + response = client().prepareSearch() + .setQuery(new PercolateQueryBuilder("query", "type", source)) 
+ .get(); + assertHitCount(response, 2); + assertThat(response.getHits().getAt(0).getId(), equalTo("9")); + assertThat(response.getHits().getAt(1).getId(), equalTo("7")); + + source = jsonBuilder().startObject().field("field3", "192.168.1.4").endObject().bytes(); + response = client().prepareSearch() + .setQuery(new PercolateQueryBuilder("query", "type", source)) + .get(); + assertHitCount(response, 1); + assertThat(response.getHits().getAt(0).getId(), equalTo("7")); + } + public void testPercolatorQueryExistingDocument() throws Exception { createIndex("test", client().admin().indices().prepareCreate("test") .addMapping("type", "field1", "type=keyword", "field2", "type=keyword") diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/ExtractQueryTermsServiceTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java similarity index 74% rename from modules/percolator/src/test/java/org/elasticsearch/percolator/ExtractQueryTermsServiceTests.java rename to modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java index b9486a89f49..1b8b123aa13 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/ExtractQueryTermsServiceTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java @@ -18,14 +18,7 @@ */ package org.elasticsearch.percolator; -import org.apache.lucene.analysis.core.WhitespaceAnalyzer; -import org.apache.lucene.document.FieldType; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexableField; -import org.apache.lucene.index.PrefixCodedTerms; import org.apache.lucene.index.Term; -import org.apache.lucene.index.memory.MemoryIndex; import org.apache.lucene.queries.BlendedTermQuery; import org.apache.lucene.queries.CommonTermsQuery; import org.apache.lucene.queries.TermsQuery; @@ -46,11 +39,11 @@ import org.apache.lucene.search.spans.SpanOrQuery; import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.lucene.search.MatchNoDocsQuery; -import org.elasticsearch.index.mapper.ParseContext; -import org.elasticsearch.percolator.ExtractQueryTermsService.Result; +import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; +import org.elasticsearch.common.lucene.search.function.RandomScoreFunction; +import org.elasticsearch.percolator.QueryAnalyzer.Result; import org.elasticsearch.test.ESTestCase; - import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -58,70 +51,18 @@ import java.util.HashSet; import java.util.List; import java.util.Set; -import static org.elasticsearch.percolator.ExtractQueryTermsService.EXTRACTION_COMPLETE; -import static org.elasticsearch.percolator.ExtractQueryTermsService.EXTRACTION_FAILED; -import static org.elasticsearch.percolator.ExtractQueryTermsService.EXTRACTION_PARTIAL; -import static org.elasticsearch.percolator.ExtractQueryTermsService.UnsupportedQueryException; -import static org.elasticsearch.percolator.ExtractQueryTermsService.extractQueryTerms; -import static org.elasticsearch.percolator.ExtractQueryTermsService.createQueryTermsQuery; -import static org.elasticsearch.percolator.ExtractQueryTermsService.selectTermListWithTheLongestShortestTerm; +import static org.elasticsearch.percolator.QueryAnalyzer.UnsupportedQueryException; +import static org.elasticsearch.percolator.QueryAnalyzer.analyze; +import static 
org.elasticsearch.percolator.QueryAnalyzer.selectTermListWithTheLongestShortestTerm; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.sameInstance; -public class ExtractQueryTermsServiceTests extends ESTestCase { - - public final static String QUERY_TERMS_FIELD = "extracted_terms"; - public final static String EXTRACTION_RESULT_FIELD = "extraction_result"; - public final static FieldType QUERY_TERMS_FIELD_TYPE = new FieldType(); - - static { - QUERY_TERMS_FIELD_TYPE.setTokenized(false); - QUERY_TERMS_FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); - QUERY_TERMS_FIELD_TYPE.freeze(); - } - - public void testExtractQueryMetadata() { - BooleanQuery.Builder bq = new BooleanQuery.Builder(); - TermQuery termQuery1 = new TermQuery(new Term("field1", "term1")); - bq.add(termQuery1, BooleanClause.Occur.SHOULD); - TermQuery termQuery2 = new TermQuery(new Term("field2", "term2")); - bq.add(termQuery2, BooleanClause.Occur.SHOULD); - - ParseContext.Document document = new ParseContext.Document(); - extractQueryTerms(bq.build(), document, QUERY_TERMS_FIELD, EXTRACTION_RESULT_FIELD, QUERY_TERMS_FIELD_TYPE); - assertThat(document.getField(EXTRACTION_RESULT_FIELD).stringValue(), equalTo(EXTRACTION_COMPLETE)); - List fields = new ArrayList<>(Arrays.asList(document.getFields(QUERY_TERMS_FIELD))); - Collections.sort(fields, (field1, field2) -> field1.binaryValue().compareTo(field2.binaryValue())); - assertThat(fields.size(), equalTo(2)); - assertThat(fields.get(0).name(), equalTo(QUERY_TERMS_FIELD)); - assertThat(fields.get(0).binaryValue().utf8ToString(), equalTo("field1\u0000term1")); - assertThat(fields.get(1).name(), equalTo(QUERY_TERMS_FIELD)); - assertThat(fields.get(1).binaryValue().utf8ToString(), equalTo("field2\u0000term2")); - } - - public void testExtractQueryMetadata_unsupported() { - TermRangeQuery query = new TermRangeQuery("field1", new BytesRef("a"), new BytesRef("z"), true, true); - ParseContext.Document document = new ParseContext.Document(); - extractQueryTerms(query, document, QUERY_TERMS_FIELD, EXTRACTION_RESULT_FIELD, QUERY_TERMS_FIELD_TYPE); - assertThat(document.getFields().size(), equalTo(1)); - assertThat(document.getField(EXTRACTION_RESULT_FIELD).stringValue(), equalTo(EXTRACTION_FAILED)); - } - - public void testExtractQueryMetadata_notVerified() { - PhraseQuery phraseQuery = new PhraseQuery("field", "term"); - - ParseContext.Document document = new ParseContext.Document(); - extractQueryTerms(phraseQuery, document, QUERY_TERMS_FIELD, EXTRACTION_RESULT_FIELD, QUERY_TERMS_FIELD_TYPE); - assertThat(document.getFields().size(), equalTo(2)); - assertThat(document.getFields().get(0).name(), equalTo(QUERY_TERMS_FIELD)); - assertThat(document.getFields().get(0).binaryValue().utf8ToString(), equalTo("field\u0000term")); - assertThat(document.getField(EXTRACTION_RESULT_FIELD).stringValue(), equalTo(EXTRACTION_PARTIAL)); - } +public class QueryAnalyzerTests extends ESTestCase { public void testExtractQueryMetadata_termQuery() { TermQuery termQuery = new TermQuery(new Term("_field", "_term")); - Result result = extractQueryTerms(termQuery); + Result result = analyze(termQuery); assertThat(result.verified, is(true)); List terms = new ArrayList<>(result.terms); assertThat(terms.size(), equalTo(1)); @@ -131,7 +72,7 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { public void testExtractQueryMetadata_termsQuery() { TermsQuery termsQuery = new TermsQuery("_field", new BytesRef("_term1"), new 
BytesRef("_term2")); - Result result = extractQueryTerms(termsQuery); + Result result = analyze(termsQuery); assertThat(result.verified, is(true)); List terms = new ArrayList<>(result.terms); Collections.sort(terms); @@ -143,7 +84,7 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { // test with different fields termsQuery = new TermsQuery(new Term("_field1", "_term1"), new Term("_field2", "_term2")); - result = extractQueryTerms(termsQuery); + result = analyze(termsQuery); assertThat(result.verified, is(true)); terms = new ArrayList<>(result.terms); Collections.sort(terms); @@ -156,7 +97,7 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { public void testExtractQueryMetadata_phraseQuery() { PhraseQuery phraseQuery = new PhraseQuery("_field", "_term1", "term2"); - Result result = extractQueryTerms(phraseQuery); + Result result = analyze(phraseQuery); assertThat(result.verified, is(false)); List terms = new ArrayList<>(result.terms); assertThat(terms.size(), equalTo(1)); @@ -179,7 +120,7 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { builder.add(subBuilder.build(), BooleanClause.Occur.SHOULD); BooleanQuery booleanQuery = builder.build(); - Result result = extractQueryTerms(booleanQuery); + Result result = analyze(booleanQuery); assertThat("Should clause with phrase query isn't verified, so entire query can't be verified", result.verified, is(false)); List terms = new ArrayList<>(result.terms); Collections.sort(terms); @@ -207,7 +148,7 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { builder.add(subBuilder.build(), BooleanClause.Occur.SHOULD); BooleanQuery booleanQuery = builder.build(); - Result result = extractQueryTerms(booleanQuery); + Result result = analyze(booleanQuery); assertThat(result.verified, is(true)); List terms = new ArrayList<>(result.terms); Collections.sort(terms); @@ -230,7 +171,7 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { builder.add(phraseQuery, BooleanClause.Occur.SHOULD); BooleanQuery booleanQuery = builder.build(); - Result result = extractQueryTerms(booleanQuery); + Result result = analyze(booleanQuery); assertThat(result.verified, is(false)); List terms = new ArrayList<>(result.terms); assertThat(terms.size(), equalTo(1)); @@ -244,58 +185,58 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { builder.add(termQuery1, BooleanClause.Occur.SHOULD); TermQuery termQuery2 = new TermQuery(new Term("_field", "_term2")); builder.add(termQuery2, BooleanClause.Occur.SHOULD); - Result result = extractQueryTerms(builder.build()); + Result result = analyze(builder.build()); assertThat("All clauses are exact, so candidate matches are verified", result.verified, is(true)); builder = new BooleanQuery.Builder(); builder.add(termQuery1, BooleanClause.Occur.SHOULD); PhraseQuery phraseQuery1 = new PhraseQuery("_field", "_term1", "_term2"); builder.add(phraseQuery1, BooleanClause.Occur.SHOULD); - result = extractQueryTerms(builder.build()); + result = analyze(builder.build()); assertThat("Clause isn't exact, so candidate matches are not verified", result.verified, is(false)); builder = new BooleanQuery.Builder(); builder.add(phraseQuery1, BooleanClause.Occur.SHOULD); PhraseQuery phraseQuery2 = new PhraseQuery("_field", "_term3", "_term4"); builder.add(phraseQuery2, BooleanClause.Occur.SHOULD); - result = extractQueryTerms(builder.build()); + result = analyze(builder.build()); assertThat("No clause is exact, so candidate matches are not verified", result.verified, is(false)); 
builder = new BooleanQuery.Builder(); builder.add(termQuery1, BooleanClause.Occur.MUST_NOT); builder.add(termQuery2, BooleanClause.Occur.SHOULD); - result = extractQueryTerms(builder.build()); + result = analyze(builder.build()); assertThat("There is a must_not clause, so candidate matches are not verified", result.verified, is(false)); builder = new BooleanQuery.Builder(); builder.setMinimumNumberShouldMatch(randomIntBetween(2, 32)); builder.add(termQuery1, BooleanClause.Occur.SHOULD); builder.add(termQuery2, BooleanClause.Occur.SHOULD); - result = extractQueryTerms(builder.build()); + result = analyze(builder.build()); assertThat("Minimum match is >= 1, so candidate matches are not verified", result.verified, is(false)); builder = new BooleanQuery.Builder(); builder.add(termQuery1, randomBoolean() ? BooleanClause.Occur.MUST : BooleanClause.Occur.FILTER); - result = extractQueryTerms(builder.build()); + result = analyze(builder.build()); assertThat("Single required clause, so candidate matches are verified", result.verified, is(false)); builder = new BooleanQuery.Builder(); builder.add(termQuery1, randomBoolean() ? BooleanClause.Occur.MUST : BooleanClause.Occur.FILTER); builder.add(termQuery2, randomBoolean() ? BooleanClause.Occur.MUST : BooleanClause.Occur.FILTER); - result = extractQueryTerms(builder.build()); + result = analyze(builder.build()); assertThat("Two or more required clauses, so candidate matches are not verified", result.verified, is(false)); builder = new BooleanQuery.Builder(); builder.add(termQuery1, randomBoolean() ? BooleanClause.Occur.MUST : BooleanClause.Occur.FILTER); builder.add(termQuery2, BooleanClause.Occur.MUST_NOT); - result = extractQueryTerms(builder.build()); + result = analyze(builder.build()); assertThat("Required and prohibited clauses, so candidate matches are not verified", result.verified, is(false)); } public void testExtractQueryMetadata_constantScoreQuery() { TermQuery termQuery1 = new TermQuery(new Term("_field", "_term")); ConstantScoreQuery constantScoreQuery = new ConstantScoreQuery(termQuery1); - Result result = extractQueryTerms(constantScoreQuery); + Result result = analyze(constantScoreQuery); assertThat(result.verified, is(true)); List terms = new ArrayList<>(result.terms); assertThat(terms.size(), equalTo(1)); @@ -306,7 +247,7 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { public void testExtractQueryMetadata_boostQuery() { TermQuery termQuery1 = new TermQuery(new Term("_field", "_term")); BoostQuery constantScoreQuery = new BoostQuery(termQuery1, 1f); - Result result = extractQueryTerms(constantScoreQuery); + Result result = analyze(constantScoreQuery); assertThat(result.verified, is(true)); List terms = new ArrayList<>(result.terms); assertThat(terms.size(), equalTo(1)); @@ -318,7 +259,7 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { CommonTermsQuery commonTermsQuery = new CommonTermsQuery(BooleanClause.Occur.SHOULD, BooleanClause.Occur.SHOULD, 100); commonTermsQuery.add(new Term("_field", "_term1")); commonTermsQuery.add(new Term("_field", "_term2")); - Result result = extractQueryTerms(commonTermsQuery); + Result result = analyze(commonTermsQuery); assertThat(result.verified, is(false)); List terms = new ArrayList<>(result.terms); Collections.sort(terms); @@ -332,7 +273,7 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { public void testExtractQueryMetadata_blendedTermQuery() { Term[] termsArr = new Term[]{new Term("_field", "_term1"), new Term("_field", "_term2")}; 
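// For contrast with the CommonTermsQuery case above (verified == false): a
// BlendedTermQuery, as used for example by multi_match's cross_fields mode, behaves as
// a plain disjunction of the given terms, so the assertions below expect every term to
// be extracted and the result to be marked verified:
//
//     Result result = analyze(BlendedTermQuery.booleanBlendedQuery(termsArr, false));
//     // result.verified == true; result.terms == {_field:_term1, _field:_term2}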
BlendedTermQuery commonTermsQuery = BlendedTermQuery.booleanBlendedQuery(termsArr, false); - Result result = extractQueryTerms(commonTermsQuery); + Result result = analyze(commonTermsQuery); assertThat(result.verified, is(true)); List<Term> terms = new ArrayList<>(result.terms); Collections.sort(terms); @@ -356,7 +297,7 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { // 4) FieldMaskingSpanQuery is a tricky query so we shouldn't optimize this SpanTermQuery spanTermQuery1 = new SpanTermQuery(new Term("_field", "_short_term")); - Result result = extractQueryTerms(spanTermQuery1); + Result result = analyze(spanTermQuery1); assertThat(result.verified, is(true)); assertTermsEqual(result.terms, spanTermQuery1.getTerm()); } @@ -367,7 +308,7 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { SpanNearQuery spanNearQuery = new SpanNearQuery.Builder("_field", true) .addClause(spanTermQuery1).addClause(spanTermQuery2).build(); - Result result = extractQueryTerms(spanNearQuery); + Result result = analyze(spanNearQuery); assertThat(result.verified, is(false)); assertTermsEqual(result.terms, spanTermQuery2.getTerm()); } @@ -376,7 +317,7 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { SpanTermQuery spanTermQuery1 = new SpanTermQuery(new Term("_field", "_short_term")); SpanTermQuery spanTermQuery2 = new SpanTermQuery(new Term("_field", "_very_long_term")); SpanOrQuery spanOrQuery = new SpanOrQuery(spanTermQuery1, spanTermQuery2); - Result result = extractQueryTerms(spanOrQuery); + Result result = analyze(spanOrQuery); assertThat(result.verified, is(false)); assertTermsEqual(result.terms, spanTermQuery1.getTerm(), spanTermQuery2.getTerm()); } @@ -384,7 +325,7 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { public void testExtractQueryMetadata_spanFirstQuery() { SpanTermQuery spanTermQuery1 = new SpanTermQuery(new Term("_field", "_short_term")); SpanFirstQuery spanFirstQuery = new SpanFirstQuery(spanTermQuery1, 20); - Result result = extractQueryTerms(spanFirstQuery); + Result result = analyze(spanFirstQuery); assertThat(result.verified, is(false)); assertTermsEqual(result.terms, spanTermQuery1.getTerm()); } @@ -393,27 +334,27 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { SpanTermQuery spanTermQuery1 = new SpanTermQuery(new Term("_field", "_short_term")); SpanTermQuery spanTermQuery2 = new SpanTermQuery(new Term("_field", "_very_long_term")); SpanNotQuery spanNotQuery = new SpanNotQuery(spanTermQuery1, spanTermQuery2); - Result result = extractQueryTerms(spanNotQuery); + Result result = analyze(spanNotQuery); assertThat(result.verified, is(false)); assertTermsEqual(result.terms, spanTermQuery1.getTerm()); } public void testExtractQueryMetadata_matchNoDocsQuery() { - Result result = extractQueryTerms(new MatchNoDocsQuery("sometimes there is no reason at all")); + Result result = analyze(new MatchNoDocsQuery("sometimes there is no reason at all")); assertThat(result.verified, is(true)); assertEquals(0, result.terms.size()); BooleanQuery.Builder bq = new BooleanQuery.Builder(); bq.add(new TermQuery(new Term("field", "value")), BooleanClause.Occur.MUST); bq.add(new MatchNoDocsQuery("sometimes there is no reason at all"), BooleanClause.Occur.MUST); - result = extractQueryTerms(bq.build()); + result = analyze(bq.build()); assertThat(result.verified, is(false)); assertEquals(0, result.terms.size()); bq = new BooleanQuery.Builder(); bq.add(new TermQuery(new Term("field", "value")), BooleanClause.Occur.SHOULD); bq.add(new
MatchNoDocsQuery("sometimes there is no reason at all"), BooleanClause.Occur.SHOULD); - result = extractQueryTerms(bq.build()); + result = analyze(bq.build()); assertThat(result.verified, is(true)); assertTermsEqual(result.terms, new Term("field", "value")); @@ -421,18 +362,18 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { Arrays.asList(new TermQuery(new Term("field", "value")), new MatchNoDocsQuery("sometimes there is no reason at all")), 1f ); - result = extractQueryTerms(disjunctionMaxQuery); + result = analyze(disjunctionMaxQuery); assertThat(result.verified, is(true)); assertTermsEqual(result.terms, new Term("field", "value")); } public void testExtractQueryMetadata_matchAllDocsQuery() { - expectThrows(UnsupportedQueryException.class, () -> extractQueryTerms(new MatchAllDocsQuery())); + expectThrows(UnsupportedQueryException.class, () -> analyze(new MatchAllDocsQuery())); BooleanQuery.Builder builder = new BooleanQuery.Builder(); builder.add(new TermQuery(new Term("field", "value")), BooleanClause.Occur.MUST); builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST); - Result result = extractQueryTerms(builder.build()); + Result result = analyze(builder.build()); assertThat(result.verified, is(false)); assertTermsEqual(result.terms, new Term("field", "value")); @@ -441,39 +382,39 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST); builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST); BooleanQuery bq1 = builder.build(); - expectThrows(UnsupportedQueryException.class, () -> extractQueryTerms(bq1)); + expectThrows(UnsupportedQueryException.class, () -> analyze(bq1)); builder = new BooleanQuery.Builder(); builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST_NOT); builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST); builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST); BooleanQuery bq2 = builder.build(); - expectThrows(UnsupportedQueryException.class, () -> extractQueryTerms(bq2)); + expectThrows(UnsupportedQueryException.class, () -> analyze(bq2)); builder = new BooleanQuery.Builder(); builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD); builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD); builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD); BooleanQuery bq3 = builder.build(); - expectThrows(UnsupportedQueryException.class, () -> extractQueryTerms(bq3)); + expectThrows(UnsupportedQueryException.class, () -> analyze(bq3)); builder = new BooleanQuery.Builder(); builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST_NOT); builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD); builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD); BooleanQuery bq4 = builder.build(); - expectThrows(UnsupportedQueryException.class, () -> extractQueryTerms(bq4)); + expectThrows(UnsupportedQueryException.class, () -> analyze(bq4)); builder = new BooleanQuery.Builder(); builder.add(new TermQuery(new Term("field", "value")), BooleanClause.Occur.SHOULD); builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD); BooleanQuery bq5 = builder.build(); - expectThrows(UnsupportedQueryException.class, () -> extractQueryTerms(bq5)); + expectThrows(UnsupportedQueryException.class, () -> analyze(bq5)); } public void testExtractQueryMetadata_unsupportedQuery() { TermRangeQuery termRangeQuery = new TermRangeQuery("_field", null, null, true, false); - UnsupportedQueryException e = 
expectThrows(UnsupportedQueryException.class, () -> extractQueryTerms(termRangeQuery)); + UnsupportedQueryException e = expectThrows(UnsupportedQueryException.class, () -> analyze(termRangeQuery)); assertThat(e.getUnsupportedQuery(), sameInstance(termRangeQuery)); TermQuery termQuery1 = new TermQuery(new Term("_field", "_term")); @@ -482,7 +423,7 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { builder.add(termRangeQuery, BooleanClause.Occur.SHOULD); BooleanQuery bq = builder.build(); - e = expectThrows(UnsupportedQueryException.class, () -> extractQueryTerms(bq)); + e = expectThrows(UnsupportedQueryException.class, () -> analyze(bq)); assertThat(e.getUnsupportedQuery(), sameInstance(termRangeQuery)); } @@ -495,7 +436,7 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { builder.add(unsupportedQuery, BooleanClause.Occur.MUST); BooleanQuery bq1 = builder.build(); - Result result = extractQueryTerms(bq1); + Result result = analyze(bq1); assertThat(result.verified, is(false)); assertTermsEqual(result.terms, termQuery1.getTerm()); @@ -505,7 +446,7 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { builder.add(termQuery2, BooleanClause.Occur.MUST); builder.add(unsupportedQuery, BooleanClause.Occur.MUST); bq1 = builder.build(); - result = extractQueryTerms(bq1); + result = analyze(bq1); assertThat(result.verified, is(false)); assertTermsEqual(result.terms, termQuery2.getTerm()); @@ -513,7 +454,7 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { builder.add(unsupportedQuery, BooleanClause.Occur.MUST); builder.add(unsupportedQuery, BooleanClause.Occur.MUST); BooleanQuery bq2 = builder.build(); - UnsupportedQueryException e = expectThrows(UnsupportedQueryException.class, () -> extractQueryTerms(bq2)); + UnsupportedQueryException e = expectThrows(UnsupportedQueryException.class, () -> analyze(bq2)); assertThat(e.getUnsupportedQuery(), sameInstance(unsupportedQuery)); } @@ -526,7 +467,7 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { Arrays.asList(termQuery1, termQuery2, termQuery3, termQuery4), 0.1f ); - Result result = extractQueryTerms(disjunctionMaxQuery); + Result result = analyze(disjunctionMaxQuery); assertThat(result.verified, is(true)); List<Term> terms = new ArrayList<>(result.terms); Collections.sort(terms); @@ -544,7 +485,7 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { Arrays.asList(termQuery1, termQuery2, termQuery3, new PhraseQuery("_field", "_term4")), 0.1f ); - result = extractQueryTerms(disjunctionMaxQuery); + result = analyze(disjunctionMaxQuery); assertThat(result.verified, is(false)); terms = new ArrayList<>(result.terms); Collections.sort(terms); @@ -561,45 +502,27 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { public void testSynonymQuery() { SynonymQuery query = new SynonymQuery(); - Result result = extractQueryTerms(query); + Result result = analyze(query); assertThat(result.verified, is(true)); assertThat(result.terms.isEmpty(), is(true)); query = new SynonymQuery(new Term("_field", "_value1"), new Term("_field", "_value2")); - result = extractQueryTerms(query); + result = analyze(query); assertThat(result.verified, is(true)); assertTermsEqual(result.terms, new Term("_field", "_value1"), new Term("_field", "_value2")); } - public void testCreateQueryMetadataQuery() throws Exception { - MemoryIndex memoryIndex = new MemoryIndex(false); - memoryIndex.addField("field1", "the quick brown fox jumps over the lazy dog", new WhitespaceAnalyzer()); -
memoryIndex.addField("field2", "some more text", new WhitespaceAnalyzer()); - memoryIndex.addField("_field3", "unhide me", new WhitespaceAnalyzer()); - memoryIndex.addField("field4", "123", new WhitespaceAnalyzer()); + public void testFunctionScoreQuery() { + TermQuery termQuery = new TermQuery(new Term("_field", "_value")); + FunctionScoreQuery functionScoreQuery = new FunctionScoreQuery(termQuery, new RandomScoreFunction()); + Result result = analyze(functionScoreQuery); + assertThat(result.verified, is(true)); + assertTermsEqual(result.terms, new Term("_field", "_value")); - IndexReader indexReader = memoryIndex.createSearcher().getIndexReader(); - TermsQuery query = (TermsQuery) - createQueryTermsQuery(indexReader, QUERY_TERMS_FIELD, new Term(EXTRACTION_RESULT_FIELD, EXTRACTION_FAILED)); - - PrefixCodedTerms terms = query.getTermData(); - assertThat(terms.size(), equalTo(15L)); - PrefixCodedTerms.TermIterator termIterator = terms.iterator(); - assertTermIterator(termIterator, "_field3\u0000me", QUERY_TERMS_FIELD); - assertTermIterator(termIterator, "_field3\u0000unhide", QUERY_TERMS_FIELD); - assertTermIterator(termIterator, "field1\u0000brown", QUERY_TERMS_FIELD); - assertTermIterator(termIterator, "field1\u0000dog", QUERY_TERMS_FIELD); - assertTermIterator(termIterator, "field1\u0000fox", QUERY_TERMS_FIELD); - assertTermIterator(termIterator, "field1\u0000jumps", QUERY_TERMS_FIELD); - assertTermIterator(termIterator, "field1\u0000lazy", QUERY_TERMS_FIELD); - assertTermIterator(termIterator, "field1\u0000over", QUERY_TERMS_FIELD); - assertTermIterator(termIterator, "field1\u0000quick", QUERY_TERMS_FIELD); - assertTermIterator(termIterator, "field1\u0000the", QUERY_TERMS_FIELD); - assertTermIterator(termIterator, "field2\u0000more", QUERY_TERMS_FIELD); - assertTermIterator(termIterator, "field2\u0000some", QUERY_TERMS_FIELD); - assertTermIterator(termIterator, "field2\u0000text", QUERY_TERMS_FIELD); - assertTermIterator(termIterator, "field4\u0000123", QUERY_TERMS_FIELD); - assertTermIterator(termIterator, EXTRACTION_FAILED, EXTRACTION_RESULT_FIELD); + functionScoreQuery = new FunctionScoreQuery(termQuery, new RandomScoreFunction(), 1f, null, 10f); + result = analyze(functionScoreQuery); + assertThat(result.verified, is(false)); + assertTermsEqual(result.terms, new Term("_field", "_value")); } public void testSelectTermsListWithHighestSumOfTermLength() { @@ -628,11 +551,6 @@ public class ExtractQueryTermsServiceTests extends ESTestCase { assertThat(result, sameInstance(expected)); } - private void assertTermIterator(PrefixCodedTerms.TermIterator termIterator, String expectedValue, String expectedField) { - assertThat(termIterator.next().utf8ToString(), equalTo(expectedValue)); - assertThat(termIterator.field(), equalTo(expectedField)); - } - private static void assertTermsEqual(Set<Term> actual, Term... expected) { assertEquals(new HashSet<>(Arrays.asList(expected)), actual); } diff --git a/modules/reindex/build.gradle b/modules/reindex/build.gradle index 5bd5aeb3dc0..0d02d1d9474 100644 --- a/modules/reindex/build.gradle +++ b/modules/reindex/build.gradle @@ -20,4 +20,45 @@ esplugin { description 'The Reindex module adds APIs to reindex from one index to another or update documents in place.' classname 'org.elasticsearch.index.reindex.ReindexPlugin' + hasClientJar = true } + +integTest { + cluster { + // Whitelist reindexing from the local node so we can test it. + setting 'reindex.remote.whitelist', 'myself' + } +} + +run { + // Whitelist reindexing from the local node so we can test it.
+ setting 'reindex.remote.whitelist', 'myself' +} + + +dependencies { + compile "org.elasticsearch.client:rest:${version}" + // dependencies of the rest client + compile "org.apache.httpcomponents:httpclient:${versions.httpclient}" + compile "org.apache.httpcomponents:httpcore:${versions.httpcore}" + compile "commons-codec:commons-codec:${versions.commonscodec}" + compile "commons-logging:commons-logging:${versions.commonslogging}" + // for http - testing reindex from remote + testCompile project(path: ':modules:transport-netty3', configuration: 'runtime') +} + +dependencyLicenses { + // Don't check the client's license. We know it. + dependencies = project.configurations.runtime.fileCollection { + it.group.startsWith('org.elasticsearch') == false + } - project.configurations.provided +} + +thirdPartyAudit.excludes = [ + // Commons logging + 'javax.servlet.ServletContextEvent', + 'javax.servlet.ServletContextListener', + 'org.apache.avalon.framework.logger.Logger', + 'org.apache.log.Hierarchy', + 'org.apache.log.Logger', +] diff --git a/modules/reindex/licenses/commons-codec-1.10.jar.sha1 b/modules/reindex/licenses/commons-codec-1.10.jar.sha1 new file mode 100644 index 00000000000..3fe8682a1b0 --- /dev/null +++ b/modules/reindex/licenses/commons-codec-1.10.jar.sha1 @@ -0,0 +1 @@ +4b95f4897fa13f2cd904aee711aeafc0c5295cd8 \ No newline at end of file diff --git a/distribution/licenses/netty-LICENSE.txt b/modules/reindex/licenses/commons-codec-LICENSE.txt similarity index 100% rename from distribution/licenses/netty-LICENSE.txt rename to modules/reindex/licenses/commons-codec-LICENSE.txt diff --git a/modules/reindex/licenses/commons-codec-NOTICE.txt b/modules/reindex/licenses/commons-codec-NOTICE.txt new file mode 100644 index 00000000000..56916449bbe --- /dev/null +++ b/modules/reindex/licenses/commons-codec-NOTICE.txt @@ -0,0 +1,17 @@ +Apache Commons Codec +Copyright 2002-2015 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). + +src/test/org/apache/commons/codec/language/DoubleMetaphoneTest.java +contains test data from http://aspell.net/test/orig/batch0.tab. +Copyright (C) 2002 Kevin Atkinson (kevina@gnu.org) + +=============================================================================== + +The content of package org.apache.commons.codec.language.bm has been translated +from the original php source code available at http://stevemorse.org/phoneticinfo.htm +with permission from the original authors. +Original source copyright: +Copyright (c) 2008 Alexander Beider & Stephen P. Morse. diff --git a/modules/reindex/licenses/commons-logging-1.1.3.jar.sha1 b/modules/reindex/licenses/commons-logging-1.1.3.jar.sha1 new file mode 100644 index 00000000000..c8756c43832 --- /dev/null +++ b/modules/reindex/licenses/commons-logging-1.1.3.jar.sha1 @@ -0,0 +1 @@ +f6f66e966c70a83ffbdb6f17a0919eaf7c8aca7f diff --git a/modules/reindex/licenses/commons-logging-LICENSE.txt b/modules/reindex/licenses/commons-logging-LICENSE.txt new file mode 100644 index 00000000000..57bc88a15a0 --- /dev/null +++ b/modules/reindex/licenses/commons-logging-LICENSE.txt @@ -0,0 +1,202 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
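(Aside: the modules/reindex/build.gradle hunk above adds the low-level REST client, along with the Apache HttpComponents and Commons dependencies whose license files follow, so that reindex-from-remote can issue HTTP requests to the source cluster; the 'reindex.remote.whitelist' setting is what permits reindexing from the local test node. A minimal sketch of driving that client, assuming a node is listening on localhost:9200; the class name is invented:)

    import org.apache.http.HttpHost;
    import org.elasticsearch.client.Response;
    import org.elasticsearch.client.RestClient;

    public final class RestClientSketch {
        public static void main(String[] args) throws Exception {
            // The same low-level client the reindex module now ships with, pointed at a local node.
            try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
                Response response = client.performRequest("GET", "/"); // fetch the root info endpoint
                System.out.println(response.getStatusLine());
            }
        }
    }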
+ diff --git a/modules/reindex/licenses/commons-logging-NOTICE.txt b/modules/reindex/licenses/commons-logging-NOTICE.txt new file mode 100644 index 00000000000..72eb32a9024 --- /dev/null +++ b/modules/reindex/licenses/commons-logging-NOTICE.txt @@ -0,0 +1,5 @@ +Apache Commons Logging +Copyright 2003-2013 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). diff --git a/modules/reindex/licenses/httpclient-4.5.2.jar.sha1 b/modules/reindex/licenses/httpclient-4.5.2.jar.sha1 new file mode 100644 index 00000000000..6937112a09f --- /dev/null +++ b/modules/reindex/licenses/httpclient-4.5.2.jar.sha1 @@ -0,0 +1 @@ +733db77aa8d9b2d68015189df76ab06304406e50 \ No newline at end of file diff --git a/modules/reindex/licenses/httpclient-LICENSE.txt b/modules/reindex/licenses/httpclient-LICENSE.txt new file mode 100644 index 00000000000..32f01eda18f --- /dev/null +++ b/modules/reindex/licenses/httpclient-LICENSE.txt @@ -0,0 +1,558 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner.
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + +========================================================================= + +This project includes Public Suffix List copied from + +licensed under the terms of the Mozilla Public License, v. 2.0 + +Full license text: + +Mozilla Public License Version 2.0 +================================== + +1. Definitions +-------------- + +1.1. "Contributor" + means each individual or legal entity that creates, contributes to + the creation of, or owns Covered Software. + +1.2. "Contributor Version" + means the combination of the Contributions of others (if any) used + by a Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + means Source Code Form to which the initial Contributor has attached + the notice in Exhibit A, the Executable Form of such Source Code + Form, and Modifications of such Source Code Form, in each case + including portions thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + (a) that the initial Contributor has attached the notice described + in Exhibit B to the Covered Software; or + + (b) that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the + terms of a Secondary License. + +1.6. "Executable Form" + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + means a work that combines Covered Software with other material, in + a separate file or files, that is not Covered Software. + +1.8. "License" + means this document. + +1.9. "Licensable" + means having the right to grant, to the maximum extent possible, + whether at the time of the initial grant or subsequently, any and + all of the rights conveyed by this License. + +1.10. "Modifications" + means any of the following: + + (a) any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered + Software; or + + (b) any new file in Source Code Form that contains any Covered + Software. + +1.11. "Patent Claims" of a Contributor + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the + License, by the making, using, selling, offering for sale, having + made, import, or transfer of either its Contributions or its + Contributor Version. + +1.12. "Secondary License" + means either the GNU General Public License, Version 2.0, the GNU + Lesser General Public License, Version 2.1, the GNU Affero General + Public License, Version 3.0, or any later versions of those + licenses. + +1.13. "Source Code Form" + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that + controls, is controlled by, or is under common control with You. For + purposes of this definition, "control" means (a) the power, direct + or indirect, to cause the direction or management of such entity, + whether by contract or otherwise, or (b) ownership of more than + fifty percent (50%) of the outstanding shares or beneficial + ownership of such entity. + +2. License Grants and Conditions +-------------------------------- + +2.1. 
Grants + +Each Contributor hereby grants You a world-wide, royalty-free, +non-exclusive license: + +(a) under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + +(b) under Patent Claims of such Contributor to make, use, sell, offer + for sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + +The licenses granted in Section 2.1 with respect to any Contribution +become effective for each Contribution on the date the Contributor first +distributes such Contribution. + +2.3. Limitations on Grant Scope + +The licenses granted in this Section 2 are the only rights granted under +this License. No additional rights or licenses will be implied from the +distribution or licensing of Covered Software under this License. +Notwithstanding Section 2.1(b) above, no patent license is granted by a +Contributor: + +(a) for any code that a Contributor has removed from Covered Software; + or + +(b) for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + +(c) under Patent Claims infringed by Covered Software in the absence of + its Contributions. + +This License does not grant any rights in the trademarks, service marks, +or logos of any Contributor (except as may be necessary to comply with +the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + +No Contributor makes additional grants as a result of Your choice to +distribute the Covered Software under a subsequent version of this +License (see Section 10.2) or under the terms of a Secondary License (if +permitted under the terms of Section 3.3). + +2.5. Representation + +Each Contributor represents that the Contributor believes its +Contributions are its original creation(s) or it has sufficient rights +to grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + +This License is not intended to limit any rights You have under +applicable copyright doctrines of fair use, fair dealing, or other +equivalents. + +2.7. Conditions + +Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted +in Section 2.1. + +3. Responsibilities +------------------- + +3.1. Distribution of Source Form + +All distribution of Covered Software in Source Code Form, including any +Modifications that You create or to which You contribute, must be under +the terms of this License. You must inform recipients that the Source +Code Form of the Covered Software is governed by the terms of this +License, and how they can obtain a copy of this License. You may not +attempt to alter or restrict the recipients' rights in the Source Code +Form. + +3.2. 
Distribution of Executable Form + +If You distribute Covered Software in Executable Form then: + +(a) such Covered Software must also be made available in Source Code + Form, as described in Section 3.1, and You must inform recipients of + the Executable Form how they can obtain a copy of such Source Code + Form by reasonable means in a timely manner, at a charge no more + than the cost of distribution to the recipient; and + +(b) You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter + the recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + +You may create and distribute a Larger Work under terms of Your choice, +provided that You also comply with the requirements of this License for +the Covered Software. If the Larger Work is a combination of Covered +Software with a work governed by one or more Secondary Licenses, and the +Covered Software is not Incompatible With Secondary Licenses, this +License permits You to additionally distribute such Covered Software +under the terms of such Secondary License(s), so that the recipient of +the Larger Work may, at their option, further distribute the Covered +Software under the terms of either this License or such Secondary +License(s). + +3.4. Notices + +You may not remove or alter the substance of any license notices +(including copyright notices, patent notices, disclaimers of warranty, +or limitations of liability) contained within the Source Code Form of +the Covered Software, except that You may alter any license notices to +the extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + +You may choose to offer, and to charge a fee for, warranty, support, +indemnity or liability obligations to one or more recipients of Covered +Software. However, You may do so only on Your own behalf, and not on +behalf of any Contributor. You must make it absolutely clear that any +such warranty, support, indemnity, or liability obligation is offered by +You alone, and You hereby agree to indemnify every Contributor for any +liability incurred by such Contributor as a result of warranty, support, +indemnity or liability terms You offer. You may include additional +disclaimers of warranty and limitations of liability specific to any +jurisdiction. + +4. Inability to Comply Due to Statute or Regulation +--------------------------------------------------- + +If it is impossible for You to comply with any of the terms of this +License with respect to some or all of the Covered Software due to +statute, judicial order, or regulation then You must: (a) comply with +the terms of this License to the maximum extent possible; and (b) +describe the limitations and the code they affect. Such description must +be placed in a text file included with all distributions of the Covered +Software under this License. Except to the extent prohibited by statute +or regulation, such description must be sufficiently detailed for a +recipient of ordinary skill to be able to understand it. + +5. Termination +-------------- + +5.1. The rights granted under this License will terminate automatically +if You fail to comply with any of its terms. 
However, if You become +compliant, then the rights granted under this License from a particular +Contributor are reinstated (a) provisionally, unless and until such +Contributor explicitly and finally terminates Your grants, and (b) on an +ongoing basis, if such Contributor fails to notify You of the +non-compliance by some reasonable means prior to 60 days after You have +come back into compliance. Moreover, Your grants from a particular +Contributor are reinstated on an ongoing basis if such Contributor +notifies You of the non-compliance by some reasonable means, this is the +first time You have received notice of non-compliance with this License +from such Contributor, and You become compliant prior to 30 days after +Your receipt of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent +infringement claim (excluding declaratory judgment actions, +counter-claims, and cross-claims) alleging that a Contributor Version +directly or indirectly infringes any patent, then the rights granted to +You by any and all Contributors for the Covered Software under Section +2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all +end user license agreements (excluding distributors and resellers) which +have been validly granted by You or Your distributors under this License +prior to termination shall survive termination. + +************************************************************************ +* * +* 6. Disclaimer of Warranty * +* ------------------------- * +* * +* Covered Software is provided under this License on an "as is" * +* basis, without warranty of any kind, either expressed, implied, or * +* statutory, including, without limitation, warranties that the * +* Covered Software is free of defects, merchantable, fit for a * +* particular purpose or non-infringing. The entire risk as to the * +* quality and performance of the Covered Software is with You. * +* Should any Covered Software prove defective in any respect, You * +* (not any Contributor) assume the cost of any necessary servicing, * +* repair, or correction. This disclaimer of warranty constitutes an * +* essential part of this License. No use of any Covered Software is * +* authorized under this License except under this disclaimer. * +* * +************************************************************************ + +************************************************************************ +* * +* 7. Limitation of Liability * +* -------------------------- * +* * +* Under no circumstances and under no legal theory, whether tort * +* (including negligence), contract, or otherwise, shall any * +* Contributor, or anyone who distributes Covered Software as * +* permitted above, be liable to You for any direct, indirect, * +* special, incidental, or consequential damages of any character * +* including, without limitation, damages for lost profits, loss of * +* goodwill, work stoppage, computer failure or malfunction, or any * +* and all other commercial damages or losses, even if such party * +* shall have been informed of the possibility of such damages. This * +* limitation of liability shall not apply to liability for death or * +* personal injury resulting from such party's negligence to the * +* extent applicable law prohibits such limitation. Some * +* jurisdictions do not allow the exclusion or limitation of * +* incidental or consequential damages, so this exclusion and * +* limitation may not apply to You. 
* +* * +************************************************************************ + +8. Litigation +------------- + +Any litigation relating to this License may be brought only in the +courts of a jurisdiction where the defendant maintains its principal +place of business and such litigation shall be governed by laws of that +jurisdiction, without reference to its conflict-of-law provisions. +Nothing in this Section shall prevent a party's ability to bring +cross-claims or counter-claims. + +9. Miscellaneous +---------------- + +This License represents the complete agreement concerning the subject +matter hereof. If any provision of this License is held to be +unenforceable, such provision shall be reformed only to the extent +necessary to make it enforceable. Any law or regulation which provides +that the language of a contract shall be construed against the drafter +shall not be used to construe this License against a Contributor. + +10. Versions of the License +--------------------------- + +10.1. New Versions + +Mozilla Foundation is the license steward. Except as provided in Section +10.3, no one other than the license steward has the right to modify or +publish new versions of this License. Each version will be given a +distinguishing version number. + +10.2. Effect of New Versions + +You may distribute the Covered Software under the terms of the version +of the License under which You originally received the Covered Software, +or under the terms of any subsequent version published by the license +steward. + +10.3. Modified Versions + +If you create software not governed by this License, and you want to +create a new license for such software, you may create and use a +modified version of this License if you rename the license and remove +any references to the name of the license steward (except to note that +such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary +Licenses + +If You choose to distribute Source Code Form that is Incompatible With +Secondary Licenses under the terms of this version of the License, the +notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice +------------------------------------------- + + This Source Code Form is subject to the terms of the Mozilla Public + License, v. 2.0. If a copy of the MPL was not distributed with this + file, You can obtain one at http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular +file, then You may include the notice in a location (such as a LICENSE +file in a relevant directory) where a recipient would be likely to look +for such a notice. + +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice +--------------------------------------------------------- + + This Source Code Form is "Incompatible With Secondary Licenses", as + defined by the Mozilla Public License, v. 2.0. diff --git a/modules/reindex/licenses/httpclient-NOTICE.txt b/modules/reindex/licenses/httpclient-NOTICE.txt new file mode 100644 index 00000000000..4f6058178b2 --- /dev/null +++ b/modules/reindex/licenses/httpclient-NOTICE.txt @@ -0,0 +1,5 @@ +Apache HttpComponents Client +Copyright 1999-2015 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). 
diff --git a/modules/reindex/licenses/httpcore-4.4.4.jar.sha1 b/modules/reindex/licenses/httpcore-4.4.4.jar.sha1 new file mode 100644 index 00000000000..ef0c257e012 --- /dev/null +++ b/modules/reindex/licenses/httpcore-4.4.4.jar.sha1 @@ -0,0 +1 @@ +b31526a230871fbe285fbcbe2813f9c0839ae9b0 \ No newline at end of file diff --git a/modules/reindex/licenses/httpcore-LICENSE.txt b/modules/reindex/licenses/httpcore-LICENSE.txt new file mode 100644 index 00000000000..72819a9f06f --- /dev/null +++ b/modules/reindex/licenses/httpcore-LICENSE.txt @@ -0,0 +1,241 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + +========================================================================= + +This project contains annotations in the package org.apache.http.annotation +which are derived from JCIP-ANNOTATIONS +Copyright (c) 2005 Brian Goetz and Tim Peierls. +See http://www.jcip.net and the Creative Commons Attribution License +(http://creativecommons.org/licenses/by/2.5) +Full text: http://creativecommons.org/licenses/by/2.5/legalcode + +License + +THE WORK (AS DEFINED BELOW) IS PROVIDED UNDER THE TERMS OF THIS CREATIVE COMMONS PUBLIC LICENSE ("CCPL" OR "LICENSE"). THE WORK IS PROTECTED BY COPYRIGHT AND/OR OTHER APPLICABLE LAW. ANY USE OF THE WORK OTHER THAN AS AUTHORIZED UNDER THIS LICENSE OR COPYRIGHT LAW IS PROHIBITED. + +BY EXERCISING ANY RIGHTS TO THE WORK PROVIDED HERE, YOU ACCEPT AND AGREE TO BE BOUND BY THE TERMS OF THIS LICENSE. THE LICENSOR GRANTS YOU THE RIGHTS CONTAINED HERE IN CONSIDERATION OF YOUR ACCEPTANCE OF SUCH TERMS AND CONDITIONS. + +1. Definitions + + "Collective Work" means a work, such as a periodical issue, anthology or encyclopedia, in which the Work in its entirety in unmodified form, along with a number of other contributions, constituting separate and independent works in themselves, are assembled into a collective whole. A work that constitutes a Collective Work will not be considered a Derivative Work (as defined below) for the purposes of this License. + "Derivative Work" means a work based upon the Work or upon the Work and other pre-existing works, such as a translation, musical arrangement, dramatization, fictionalization, motion picture version, sound recording, art reproduction, abridgment, condensation, or any other form in which the Work may be recast, transformed, or adapted, except that a work that constitutes a Collective Work will not be considered a Derivative Work for the purpose of this License. For the avoidance of doubt, where the Work is a musical composition or sound recording, the synchronization of the Work in timed-relation with a moving image ("synching") will be considered a Derivative Work for the purpose of this License. + "Licensor" means the individual or entity that offers the Work under the terms of this License. + "Original Author" means the individual or entity who created the Work. + "Work" means the copyrightable work of authorship offered under the terms of this License. + "You" means an individual or entity exercising rights under this License who has not previously violated the terms of this License with respect to the Work, or who has received express permission from the Licensor to exercise rights under this License despite a previous violation. + +2. Fair Use Rights. Nothing in this license is intended to reduce, limit, or restrict any rights arising from fair use, first sale or other limitations on the exclusive rights of the copyright owner under copyright law or other applicable laws. + +3. License Grant. 
Subject to the terms and conditions of this License, Licensor hereby grants You a worldwide, royalty-free, non-exclusive, perpetual (for the duration of the applicable copyright) license to exercise the rights in the Work as stated below: + + to reproduce the Work, to incorporate the Work into one or more Collective Works, and to reproduce the Work as incorporated in the Collective Works; + to create and reproduce Derivative Works; + to distribute copies or phonorecords of, display publicly, perform publicly, and perform publicly by means of a digital audio transmission the Work including as incorporated in Collective Works; + to distribute copies or phonorecords of, display publicly, perform publicly, and perform publicly by means of a digital audio transmission Derivative Works. + + For the avoidance of doubt, where the work is a musical composition: + Performance Royalties Under Blanket Licenses. Licensor waives the exclusive right to collect, whether individually or via a performance rights society (e.g. ASCAP, BMI, SESAC), royalties for the public performance or public digital performance (e.g. webcast) of the Work. + Mechanical Rights and Statutory Royalties. Licensor waives the exclusive right to collect, whether individually or via a music rights agency or designated agent (e.g. Harry Fox Agency), royalties for any phonorecord You create from the Work ("cover version") and distribute, subject to the compulsory license created by 17 USC Section 115 of the US Copyright Act (or the equivalent in other jurisdictions). + Webcasting Rights and Statutory Royalties. For the avoidance of doubt, where the Work is a sound recording, Licensor waives the exclusive right to collect, whether individually or via a performance-rights society (e.g. SoundExchange), royalties for the public digital performance (e.g. webcast) of the Work, subject to the compulsory license created by 17 USC Section 114 of the US Copyright Act (or the equivalent in other jurisdictions). + +The above rights may be exercised in all media and formats whether now known or hereafter devised. The above rights include the right to make such modifications as are technically necessary to exercise the rights in other media and formats. All rights not expressly granted by Licensor are hereby reserved. + +4. Restrictions.The license granted in Section 3 above is expressly made subject to and limited by the following restrictions: + + You may distribute, publicly display, publicly perform, or publicly digitally perform the Work only under the terms of this License, and You must include a copy of, or the Uniform Resource Identifier for, this License with every copy or phonorecord of the Work You distribute, publicly display, publicly perform, or publicly digitally perform. You may not offer or impose any terms on the Work that alter or restrict the terms of this License or the recipients' exercise of the rights granted hereunder. You may not sublicense the Work. You must keep intact all notices that refer to this License and to the disclaimer of warranties. You may not distribute, publicly display, publicly perform, or publicly digitally perform the Work with any technological measures that control access or use of the Work in a manner inconsistent with the terms of this License Agreement. The above applies to the Work as incorporated in a Collective Work, but this does not require the Collective Work apart from the Work itself to be made subject to the terms of this License. 
If You create a Collective Work, upon notice from any Licensor You must, to the extent practicable, remove from the Collective Work any credit as required by clause 4(b), as requested. If You create a Derivative Work, upon notice from any Licensor You must, to the extent practicable, remove from the Derivative Work any credit as required by clause 4(b), as requested. + If you distribute, publicly display, publicly perform, or publicly digitally perform the Work or any Derivative Works or Collective Works, You must keep intact all copyright notices for the Work and provide, reasonable to the medium or means You are utilizing: (i) the name of the Original Author (or pseudonym, if applicable) if supplied, and/or (ii) if the Original Author and/or Licensor designate another party or parties (e.g. a sponsor institute, publishing entity, journal) for attribution in Licensor's copyright notice, terms of service or by other reasonable means, the name of such party or parties; the title of the Work if supplied; to the extent reasonably practicable, the Uniform Resource Identifier, if any, that Licensor specifies to be associated with the Work, unless such URI does not refer to the copyright notice or licensing information for the Work; and in the case of a Derivative Work, a credit identifying the use of the Work in the Derivative Work (e.g., "French translation of the Work by Original Author," or "Screenplay based on original Work by Original Author"). Such credit may be implemented in any reasonable manner; provided, however, that in the case of a Derivative Work or Collective Work, at a minimum such credit will appear where any other comparable authorship credit appears and in a manner at least as prominent as such other comparable authorship credit. + +5. Representations, Warranties and Disclaimer + +UNLESS OTHERWISE MUTUALLY AGREED TO BY THE PARTIES IN WRITING, LICENSOR OFFERS THE WORK AS-IS AND MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE WORK, EXPRESS, IMPLIED, STATUTORY OR OTHERWISE, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF TITLE, MERCHANTIBILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, ACCURACY, OR THE PRESENCE OF ABSENCE OF ERRORS, WHETHER OR NOT DISCOVERABLE. SOME JURISDICTIONS DO NOT ALLOW THE EXCLUSION OF IMPLIED WARRANTIES, SO SUCH EXCLUSION MAY NOT APPLY TO YOU. + +6. Limitation on Liability. EXCEPT TO THE EXTENT REQUIRED BY APPLICABLE LAW, IN NO EVENT WILL LICENSOR BE LIABLE TO YOU ON ANY LEGAL THEORY FOR ANY SPECIAL, INCIDENTAL, CONSEQUENTIAL, PUNITIVE OR EXEMPLARY DAMAGES ARISING OUT OF THIS LICENSE OR THE USE OF THE WORK, EVEN IF LICENSOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. + +7. Termination + + This License and the rights granted hereunder will terminate automatically upon any breach by You of the terms of this License. Individuals or entities who have received Derivative Works or Collective Works from You under this License, however, will not have their licenses terminated provided such individuals or entities remain in full compliance with those licenses. Sections 1, 2, 5, 6, 7, and 8 will survive any termination of this License. + Subject to the above terms and conditions, the license granted here is perpetual (for the duration of the applicable copyright in the Work). 
Notwithstanding the above, Licensor reserves the right to release the Work under different license terms or to stop distributing the Work at any time; provided, however that any such election will not serve to withdraw this License (or any other license that has been, or is required to be, granted under the terms of this License), and this License will continue in full force and effect unless terminated as stated above. + +8. Miscellaneous + + Each time You distribute or publicly digitally perform the Work or a Collective Work, the Licensor offers to the recipient a license to the Work on the same terms and conditions as the license granted to You under this License. + Each time You distribute or publicly digitally perform a Derivative Work, Licensor offers to the recipient a license to the original Work on the same terms and conditions as the license granted to You under this License. + If any provision of this License is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this License, and without further action by the parties to this agreement, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable. + No term or provision of this License shall be deemed waived and no breach consented to unless such waiver or consent shall be in writing and signed by the party to be charged with such waiver or consent. + This License constitutes the entire agreement between the parties with respect to the Work licensed here. There are no understandings, agreements or representations with respect to the Work not specified here. Licensor shall not be bound by any additional provisions that may appear in any communication from You. This License may not be modified without the mutual written agreement of the Licensor and You. diff --git a/modules/reindex/licenses/httpcore-NOTICE.txt b/modules/reindex/licenses/httpcore-NOTICE.txt new file mode 100644 index 00000000000..c0be50a505e --- /dev/null +++ b/modules/reindex/licenses/httpcore-NOTICE.txt @@ -0,0 +1,8 @@ +Apache HttpComponents Core +Copyright 2005-2014 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). + +This project contains annotations derived from JCIP-ANNOTATIONS +Copyright (c) 2005 Brian Goetz and Tim Peierls. 
See http://www.jcip.net diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java index 3403a8077b9..584dd022932 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkByScrollAction.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.reindex; -import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; @@ -30,34 +29,24 @@ import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.bulk.Retry; import org.elasticsearch.action.index.IndexResponse; -import org.elasticsearch.action.search.ClearScrollRequest; -import org.elasticsearch.action.search.ClearScrollResponse; -import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.SearchScrollRequest; -import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.client.ParentTaskAssigningClient; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; -import org.elasticsearch.search.SearchHit; +import org.elasticsearch.index.reindex.ScrollableHitSource.SearchFailure; +import org.elasticsearch.search.sort.SortBuilder; import org.elasticsearch.threadpool.ThreadPool; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashSet; -import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicLong; -import java.util.concurrent.atomic.AtomicReference; -import java.util.function.Consumer; import static java.lang.Math.max; import static java.lang.Math.min; @@ -74,46 +63,57 @@ import static org.elasticsearch.search.sort.SortBuilders.fieldSort; * their tests can use them. Most methods run in the listener thread pool because they are meant to be fast and don't expect to block. */ public abstract class AbstractAsyncBulkByScrollAction<Request extends AbstractBulkByScrollRequest<Request>> { + protected final ESLogger logger; + protected final BulkByScrollTask task; + protected final ThreadPool threadPool; /** * The request for this action. Named mainRequest because we create lots of request variables all representing child * requests of this mainRequest.
*/ protected final Request mainRequest; - protected final BulkByScrollTask task; private final AtomicLong startTime = new AtomicLong(-1); - private final AtomicReference scroll = new AtomicReference<>(); private final Set destinationIndices = Collections.newSetFromMap(new ConcurrentHashMap<>()); - private final ESLogger logger; private final ParentTaskAssigningClient client; - private final ThreadPool threadPool; - private final SearchRequest firstSearchRequest; private final ActionListener listener; - private final BackoffPolicy backoffPolicy; private final Retry bulkRetry; + private final ScrollableHitSource scrollSource; public AbstractAsyncBulkByScrollAction(BulkByScrollTask task, ESLogger logger, ParentTaskAssigningClient client, - ThreadPool threadPool, Request mainRequest, SearchRequest firstSearchRequest, - ActionListener listener) { + ThreadPool threadPool, Request mainRequest, ActionListener listener) { this.task = task; this.logger = logger; this.client = client; this.threadPool = threadPool; this.mainRequest = mainRequest; - this.firstSearchRequest = firstSearchRequest; this.listener = listener; - backoffPolicy = buildBackoffPolicy(); - bulkRetry = Retry.on(EsRejectedExecutionException.class).policy(wrapBackoffPolicy(backoffPolicy)); + BackoffPolicy backoffPolicy = buildBackoffPolicy(); + bulkRetry = Retry.on(EsRejectedExecutionException.class).policy(BackoffPolicy.wrap(backoffPolicy, task::countBulkRetry)); + scrollSource = buildScrollableResultSource(backoffPolicy); + /* + * Default to sorting by doc. We can't do this in the request itself because it is normal to *add* to the sorts rather than replace + * them and if we add _doc as the first sort by default then sorts will never work.... So we add it here, only if there isn't + * another sort. + */ + List> sorts = mainRequest.getSearchRequest().source().sorts(); + if (sorts == null || sorts.isEmpty()) { + mainRequest.getSearchRequest().source().sort(fieldSort("_doc")); + } } - protected abstract BulkRequest buildBulk(Iterable docs); + protected abstract BulkRequest buildBulk(Iterable docs); + + protected ScrollableHitSource buildScrollableResultSource(BackoffPolicy backoffPolicy) { + return new ClientScrollableHitSource(logger, backoffPolicy, threadPool, task::countSearchRetry, this::finishHim, client, + mainRequest.getSearchRequest()); + } /** * Build the response for reindex actions. */ protected BulkIndexByScrollResponse buildResponse(TimeValue took, List indexingFailures, - List searchFailures, boolean timedOut) { + List searchFailures, boolean timedOut) { return new BulkIndexByScrollResponse(took, task.getStatus(), indexingFailures, searchFailures, timedOut); } @@ -126,50 +126,33 @@ public abstract class AbstractAsyncBulkByScrollAction onScrollResponse(timeValueNanos(System.nanoTime()), 0, response)); + } catch (Exception e) { + finishHim(e); } - searchWithRetry(listener -> client.search(firstSearchRequest, listener), (SearchResponse response) -> { - logger.debug("[{}] documents match query", response.getHits().getTotalHits()); - onScrollResponse(timeValueNanos(System.nanoTime()), 0, response); - }); } /** * Process a scroll response. * @param lastBatchStartTime the time when the last batch started. Used to calculate the throttling delay. * @param lastBatchSize the size of the last batch. Used to calculate the throttling delay. 
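The constructor above defaults the scroll request to sorting by _doc, but only when the caller configured no sort of its own; appending _doc unconditionally would clobber user sorts. A minimal stand-alone sketch of that guard, using a hypothetical SortableQuery stand-in rather than the real SearchSourceBuilder:

import java.util.ArrayList;
import java.util.List;

// Hypothetical stand-in for the search source; only here to illustrate the guard.
class SortableQuery {
    private final List<String> sorts = new ArrayList<>();
    List<String> sorts() { return sorts; }
    SortableQuery sort(String field) { sorts.add(field); return this; }
}

public class DefaultDocSortSketch {
    // Mirrors the constructor logic above: only add the _doc default when the
    // caller left the sort list empty, so explicit sorts are never overridden.
    static void applyDefaultSort(SortableQuery query) {
        if (query.sorts().isEmpty()) {
            query.sort("_doc");
        }
    }

    public static void main(String[] args) {
        SortableQuery plain = new SortableQuery();
        applyDefaultSort(plain);
        System.out.println(plain.sorts()); // [_doc]

        SortableQuery custom = new SortableQuery().sort("timestamp");
        applyDefaultSort(custom);
        System.out.println(custom.sorts()); // [timestamp] -- the default stays out of the way
    }
}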
- * @param searchResponse the scroll response to process + * @param response the scroll response to process */ - void onScrollResponse(TimeValue lastBatchStartTime, int lastBatchSize, SearchResponse searchResponse) { + void onScrollResponse(TimeValue lastBatchStartTime, int lastBatchSize, ScrollableHitSource.Response response) { if (task.isCancelled()) { finishHim(null); return; } - setScroll(searchResponse.getScrollId()); if ( // If any of the shards failed that should abort the request. - (searchResponse.getShardFailures() != null && searchResponse.getShardFailures().length > 0) + (response.getFailures().size() > 0) // Timeouts aren't shard failures but we still need to pass them back to the user. - || searchResponse.isTimedOut() + || response.isTimedOut() ) { - startNormalTermination(emptyList(), unmodifiableList(Arrays.asList(searchResponse.getShardFailures())), - searchResponse.isTimedOut()); + refreshAndFinish(emptyList(), response.getFailures(), response.isTimedOut()); return; } - long total = searchResponse.getHits().totalHits(); + long total = response.getTotalHits(); if (mainRequest.getSize() > 0) { total = min(total, mainRequest.getSize()); } @@ -181,12 +164,12 @@ public abstract class AbstractAsyncBulkByScrollAction docsIterable = Arrays.asList(docs); + List hits = response.getHits(); if (mainRequest.getSize() != SIZE_ALL_MATCHES) { - // Truncate the docs if we have more than the request size + // Truncate the hits if we have more than the request size long remaining = max(0, mainRequest.getSize() - task.getSuccessfullyProcessed()); - if (remaining < docs.length) { - docsIterable = docsIterable.subList(0, (int) remaining); + if (remaining < hits.size()) { + hits = hits.subList(0, (int) remaining); } } - BulkRequest request = buildBulk(docsIterable); + BulkRequest request = buildBulk(hits); if (request.requests().isEmpty()) { /* * If we noop-ed the entire batch then just skip to the next batch or the BulkRequest would fail validation. @@ -250,7 +231,7 @@ public abstract class AbstractAsyncBulkByScrollAction= mainRequest.getSize()) { // We've processed all the requested docs. - startNormalTermination(emptyList(), emptyList(), false); + refreshAndFinish(emptyList(), emptyList(), false); return; } startNextScroll(thisBatchStartTime, response.getItems().length); - } catch (Throwable t) { + } catch (Exception t) { finishHim(t); } } @@ -324,11 +305,8 @@ public abstract class AbstractAsyncBulkByScrollAction client.searchScroll(request, listener), (SearchResponse response) -> { + TimeValue extraKeepAlive = task.throttleWaitTime(lastBatchStartTime, lastBatchSize); + scrollSource.startNextScroll(extraKeepAlive, response -> { onScrollResponse(lastBatchStartTime, lastBatchSize, response); }); } @@ -344,9 +322,10 @@ public abstract class AbstractAsyncBulkByScrollAction indexingFailures, List searchFailures, boolean timedOut) { + void refreshAndFinish(List indexingFailures, List searchFailures, boolean timedOut) { if (task.isCancelled() || false == mainRequest.isRefresh() || destinationIndices.isEmpty()) { finishHim(null, indexingFailures, searchFailures, timedOut); return; @@ -360,7 +339,7 @@ public abstract class AbstractAsyncBulkByScrollAction indexingFailures, List searchFailures, boolean timedOut) { - String scrollId = scroll.get(); - if (Strings.hasLength(scrollId)) { - /* - * Fire off the clear scroll but don't wait for it it return before - * we send the use their response. 
- */ - ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); - clearScrollRequest.addScrollId(scrollId); - /* - * Unwrap the client so we don't set our task as the parent. If we *did* set our ID then the clear scroll would be cancelled as - * if this task is cancelled. But we want to clear the scroll regardless of whether or not the main request was cancelled. - */ - client.unwrap().clearScroll(clearScrollRequest, new ActionListener() { - @Override - public void onResponse(ClearScrollResponse response) { - logger.debug("Freed [{}] contexts", response.getNumFreed()); - } - - @Override - public void onFailure(Throwable e) { - logger.warn("Failed to clear scroll [{}]", e, scrollId); - } - }); - } + void finishHim(Exception failure, List indexingFailures, List searchFailures, boolean timedOut) { + scrollSource.close(); if (failure == null) { listener.onResponse( buildResponse(timeValueNanos(System.nanoTime() - startTime.get()), indexingFailures, searchFailures, timedOut)); @@ -435,75 +390,6 @@ public abstract class AbstractAsyncBulkByScrollAction iterator() { - return new Iterator() { - private final Iterator delegate = backoffPolicy.iterator(); - @Override - public boolean hasNext() { - return delegate.hasNext(); - } - - @Override - public TimeValue next() { - if (false == delegate.hasNext()) { - return null; - } - task.countBulkRetry(); - return delegate.next(); - } - }; - } - }; - } - - /** - * Run a search action and call onResponse when a the response comes in, retrying if the action fails with an exception caused by - * rejected execution. - * - * @param action consumes a listener and starts the action. The listener it consumes is rigged to retry on failure. - * @param onResponse consumes the response from the action - */ - private void searchWithRetry(Consumer> action, Consumer onResponse) { - class RetryHelper extends AbstractRunnable implements ActionListener { - private final Iterator retries = backoffPolicy.iterator(); - - @Override - public void onResponse(T response) { - onResponse.accept(response); - } - - @Override - protected void doRun() throws Exception { - action.accept(this); - } - - @Override - public void onFailure(Throwable e) { - if (ExceptionsHelper.unwrap(e, EsRejectedExecutionException.class) != null) { - if (retries.hasNext()) { - TimeValue delay = retries.next(); - logger.trace("retrying rejected search after [{}]", e, delay); - threadPool.schedule(delay, ThreadPool.Names.SAME, this); - task.countSearchRetry(); - } else { - logger.warn("giving up on search because we retried {} times without success", e, retries); - finishHim(e); - } - } else { - logger.warn("giving up on search because it failed with a non-retryable exception", e); - finishHim(e); - } - } - } - new RetryHelper().run(); + scrollSource.setScroll(scroll); } } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollAction.java index 60ab088d76c..4b87df46312 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; -import 
org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.client.ParentTaskAssigningClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.logging.ESLogger; @@ -44,8 +43,6 @@ import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; -import org.elasticsearch.search.SearchHit; -import org.elasticsearch.search.SearchHitField; import org.elasticsearch.threadpool.ThreadPool; import java.util.Arrays; @@ -72,13 +69,13 @@ public abstract class AbstractAsyncBulkIndexByScrollAction, SearchHit, RequestWrapper> scriptApplier; + private final BiFunction, ScrollableHitSource.Hit, RequestWrapper> scriptApplier; public AbstractAsyncBulkIndexByScrollAction(BulkByScrollTask task, ESLogger logger, ParentTaskAssigningClient client, - ThreadPool threadPool, Request mainRequest, SearchRequest firstSearchRequest, + ThreadPool threadPool, Request mainRequest, ActionListener listener, ScriptService scriptService, ClusterState clusterState) { - super(task, logger, client, threadPool, mainRequest, firstSearchRequest, listener); + super(task, logger, client, threadPool, mainRequest, listener); this.scriptService = scriptService; this.clusterState = clusterState; this.scriptApplier = Objects.requireNonNull(buildScriptApplier(), "script applier must not be null"); @@ -87,15 +84,15 @@ public abstract class AbstractAsyncBulkIndexByScrollAction, SearchHit, RequestWrapper> buildScriptApplier() { + protected BiFunction, ScrollableHitSource.Hit, RequestWrapper> buildScriptApplier() { // The default script applier executes a no-op return (request, searchHit) -> request; } @Override - protected BulkRequest buildBulk(Iterable docs) { + protected BulkRequest buildBulk(Iterable docs) { BulkRequest bulkRequest = new BulkRequest(); - for (SearchHit doc : docs) { + for (ScrollableHitSource.Hit doc : docs) { if (accept(doc)) { RequestWrapper request = scriptApplier.apply(copyMetadata(buildRequest(doc), doc), doc); if (request != null) { @@ -111,14 +108,14 @@ public abstract class AbstractAsyncBulkIndexByScrollAction buildRequest(SearchHit doc); + protected abstract RequestWrapper buildRequest(ScrollableHitSource.Hit doc); /** * Copies the metadata from a hit to the request. */ - protected RequestWrapper copyMetadata(RequestWrapper request, SearchHit doc) { - copyParent(request, fieldValue(doc, ParentFieldMapper.NAME)); - copyRouting(request, fieldValue(doc, RoutingFieldMapper.NAME)); + protected RequestWrapper copyMetadata(RequestWrapper request, ScrollableHitSource.Hit doc) { + request.setParent(doc.getParent()); + copyRouting(request, doc.getRouting()); // Comes back as a Long but needs to be a string - Long timestamp = fieldValue(doc, TimestampFieldMapper.NAME); + Long timestamp = doc.getTimestamp(); if (timestamp != null) { request.setTimestamp(timestamp.toString()); } - Long ttl = fieldValue(doc, TTLFieldMapper.NAME); + Long ttl = doc.getTTL(); if (ttl != null) { request.setTtl(ttl); } return request; } - /** - * Copy the parent from a search hit to the request. - */ - protected void copyParent(RequestWrapper request, String parent) { - request.setParent(parent); - } - /** * Copy the routing from a search hit to the request. */ @@ -163,11 +153,6 @@ public abstract class AbstractAsyncBulkIndexByScrollAction T fieldValue(SearchHit doc, String fieldName) { - SearchHitField field = doc.field(fieldName); - return field == null ? 
null : field.value(); - } - /** * Wrapper for the {@link ActionRequest} that are used in this action class. */ @@ -435,7 +420,7 @@ public abstract class AbstractAsyncBulkIndexByScrollAction, SearchHit, RequestWrapper> { + public abstract class ScriptApplier implements BiFunction, ScrollableHitSource.Hit, RequestWrapper> { private final BulkByScrollTask task; private final ScriptService scriptService; @@ -455,7 +440,7 @@ public abstract class AbstractAsyncBulkIndexByScrollAction apply(RequestWrapper request, SearchHit doc) { + public RequestWrapper apply(RequestWrapper request, ScrollableHitSource.Hit doc) { if (script == null) { return request; } @@ -467,18 +452,18 @@ public abstract class AbstractAsyncBulkIndexByScrollAction(); } - context.put(IndexFieldMapper.NAME, doc.index()); - context.put(TypeFieldMapper.NAME, doc.type()); - context.put(IdFieldMapper.NAME, doc.id()); + context.put(IndexFieldMapper.NAME, doc.getIndex()); + context.put(TypeFieldMapper.NAME, doc.getType()); + context.put(IdFieldMapper.NAME, doc.getId()); Long oldVersion = doc.getVersion(); context.put(VersionFieldMapper.NAME, oldVersion); - String oldParent = fieldValue(doc, ParentFieldMapper.NAME); + String oldParent = doc.getParent(); context.put(ParentFieldMapper.NAME, oldParent); - String oldRouting = fieldValue(doc, RoutingFieldMapper.NAME); + String oldRouting = doc.getRouting(); context.put(RoutingFieldMapper.NAME, oldRouting); - Long oldTimestamp = fieldValue(doc, TimestampFieldMapper.NAME); + Long oldTimestamp = doc.getTimestamp(); context.put(TimestampFieldMapper.NAME, oldTimestamp); - Long oldTTL = fieldValue(doc, TTLFieldMapper.NAME); + Long oldTTL = doc.getTTL(); context.put(TTLFieldMapper.NAME, oldTTL); context.put(SourceFieldMapper.NAME, request.getSource()); @@ -501,15 +486,15 @@ public abstract class AbstractAsyncBulkIndexByScrollAction) resultCtx.remove(SourceFieldMapper.NAME)); Object newValue = context.remove(IndexFieldMapper.NAME); - if (false == doc.index().equals(newValue)) { + if (false == doc.getIndex().equals(newValue)) { scriptChangedIndex(request, newValue); } newValue = context.remove(TypeFieldMapper.NAME); - if (false == doc.type().equals(newValue)) { + if (false == doc.getType().equals(newValue)) { scriptChangedType(request, newValue); } newValue = context.remove(IdFieldMapper.NAME); - if (false == doc.id().equals(newValue)) { + if (false == doc.getId().equals(newValue)) { scriptChangedId(request, newValue); } newValue = context.remove(VersionFieldMapper.NAME); diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBaseReindexRestHandler.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBaseReindexRestHandler.java index 284e51e054f..f7bcc9d1d6c 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBaseReindexRestHandler.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBaseReindexRestHandler.java @@ -20,9 +20,9 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.GenericAction; import org.elasticsearch.action.WriteConsistencyLevel; -import org.elasticsearch.action.support.TransportAction; -import org.elasticsearch.client.Client; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -43,19 +43,19 @@ import java.util.Map; public 
abstract class AbstractBaseReindexRestHandler< Request extends AbstractBulkByScrollRequest, - TA extends TransportAction + A extends GenericAction > extends BaseRestHandler { protected final IndicesQueriesRegistry indicesQueriesRegistry; protected final AggregatorParsers aggParsers; protected final Suggesters suggesters; private final ClusterService clusterService; - private final TA action; + private final A action; - protected AbstractBaseReindexRestHandler(Settings settings, Client client, - IndicesQueriesRegistry indicesQueriesRegistry, AggregatorParsers aggParsers, Suggesters suggesters, - ClusterService clusterService, TA action) { - super(settings, client); + protected AbstractBaseReindexRestHandler(Settings settings, IndicesQueriesRegistry indicesQueriesRegistry, + AggregatorParsers aggParsers, Suggesters suggesters, + ClusterService clusterService, A action) { + super(settings); this.indicesQueriesRegistry = indicesQueriesRegistry; this.aggParsers = aggParsers; this.suggesters = suggesters; @@ -63,9 +63,8 @@ public abstract class AbstractBaseReindexRestHandler< this.action = action; } - protected void handleRequest(RestRequest request, RestChannel channel, + protected void handleRequest(RestRequest request, RestChannel channel, NodeClient client, boolean includeCreated, boolean includeUpdated) throws IOException { - // Build the internal request Request internal = setCommonOptions(request, buildRequest(request)); @@ -75,14 +74,14 @@ public abstract class AbstractBaseReindexRestHandler< params.put(BulkByScrollTask.Status.INCLUDE_CREATED, Boolean.toString(includeCreated)); params.put(BulkByScrollTask.Status.INCLUDE_UPDATED, Boolean.toString(includeUpdated)); - action.execute(internal, new BulkIndexByScrollResponseContentListener<>(channel, params)); + client.executeLocally(action, internal, new BulkIndexByScrollResponseContentListener(channel, params)); return; } else { internal.setShouldPersistResult(true); } /* - * Lets try and validate before forking so the user gets some error. The + * Let's try and validate before forking so the user gets some error. The * task can't totally validate until it starts but this is better than * nothing. */ @@ -91,7 +90,7 @@ public abstract class AbstractBaseReindexRestHandler< channel.sendResponse(new BytesRestResponse(channel, validationException)); return; } - sendTask(channel, action.execute(internal, LoggingTaskListener.instance())); + sendTask(channel, client.executeLocally(action, internal, LoggingTaskListener.instance())); } /** @@ -138,20 +137,20 @@ public abstract class AbstractBaseReindexRestHandler< if (requestsPerSecondString == null) { return null; } - if ("unlimited".equals(requestsPerSecondString)) { - return Float.POSITIVE_INFINITY; - } float requestsPerSecond; try { requestsPerSecond = Float.parseFloat(requestsPerSecondString); } catch (NumberFormatException e) { throw new IllegalArgumentException( - "[requests_per_second] must be a float greater than 0. Use \"unlimited\" to disable throttling.", e); + "[requests_per_second] must be a float greater than 0. Use -1 to disable throttling.", e); + } + if (requestsPerSecond == -1) { + return Float.POSITIVE_INFINITY; } if (requestsPerSecond <= 0) { - // We validate here and in the setters because the setters use "Float.POSITIVE_INFINITY" instead of "unlimited" + // We validate here and in the setters because the setters use "Float.POSITIVE_INFINITY" instead of -1 throw new IllegalArgumentException( - "[requests_per_second] must be a float greater than 0. 
Use \"unlimited\" to disable throttling."); + "[requests_per_second] must be a float greater than 0. Use -1 to disable throttling."); } return requestsPerSecond; } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java index 926da3befdd..41f103698d6 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java @@ -19,9 +19,8 @@ package org.elasticsearch.index.reindex; +import org.elasticsearch.action.GenericAction; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.support.TransportAction; -import org.elasticsearch.client.Client; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; @@ -48,12 +47,12 @@ import static org.elasticsearch.index.reindex.AbstractBulkByScrollRequest.SIZE_A */ public abstract class AbstractBulkByQueryRestHandler< Request extends AbstractBulkByScrollRequest, - TA extends TransportAction> extends AbstractBaseReindexRestHandler { + A extends GenericAction> extends AbstractBaseReindexRestHandler { - protected AbstractBulkByQueryRestHandler(Settings settings, Client client, IndicesQueriesRegistry indicesQueriesRegistry, + protected AbstractBulkByQueryRestHandler(Settings settings, IndicesQueriesRegistry indicesQueriesRegistry, AggregatorParsers aggParsers, Suggesters suggesters, ClusterService clusterService, - TA action) { - super(settings, client, indicesQueriesRegistry, aggParsers, suggesters, clusterService, action); + A action) { + super(settings, indicesQueriesRegistry, aggParsers, suggesters, clusterService, action); } protected void parseInternalRequest(Request internal, RestRequest restRequest, diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java index 80a6ff891da..7725ee7f519 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java @@ -123,8 +123,8 @@ public abstract class AbstractBulkByScrollRequest indexingFailures; - private List searchFailures; + private List bulkFailures; + private List searchFailures; private boolean timedOut; public BulkIndexByScrollResponse() { } - public BulkIndexByScrollResponse(TimeValue took, BulkByScrollTask.Status status, List indexingFailures, - List searchFailures, boolean timedOut) { + public BulkIndexByScrollResponse(TimeValue took, BulkByScrollTask.Status status, List bulkFailures, + List searchFailures, boolean timedOut) { this.took = took; this.status = requireNonNull(status, "Null status not supported"); - this.indexingFailures = indexingFailures; + this.bulkFailures = bulkFailures; this.searchFailures = searchFailures; this.timedOut = timedOut; } @@ -113,17 +110,16 @@ public class BulkIndexByScrollResponse extends ActionResponse implements ToXCont } /** - * All of the indexing failures. Version conflicts are only included if the request sets abortOnVersionConflict to true (the - * default). + * All of the bulk failures. 
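The requests_per_second handling above replaces the old "unlimited" keyword with a -1 sentinel that maps to Float.POSITIVE_INFINITY internally. Pulled out of the handler, the parsing rule is roughly the following sketch (illustrative class name; the real logic lives in AbstractBaseReindexRestHandler):

public class RequestsPerSecondSketch {
    // Returns null when the parameter was absent so the default is kept.
    static Float parseRequestsPerSecond(String value) {
        if (value == null) {
            return null;
        }
        float requestsPerSecond;
        try {
            requestsPerSecond = Float.parseFloat(value);
        } catch (NumberFormatException e) {
            throw new IllegalArgumentException(
                    "[requests_per_second] must be a float greater than 0. Use -1 to disable throttling.", e);
        }
        if (requestsPerSecond == -1) {
            return Float.POSITIVE_INFINITY; // -1 is the "no throttling" sentinel
        }
        if (requestsPerSecond <= 0) {
            // Validated here and in the setters because the setters only see Float.POSITIVE_INFINITY, never -1.
            throw new IllegalArgumentException(
                    "[requests_per_second] must be a float greater than 0. Use -1 to disable throttling.");
        }
        return requestsPerSecond;
    }

    public static void main(String[] args) {
        System.out.println(parseRequestsPerSecond("-1"));   // Infinity
        System.out.println(parseRequestsPerSecond("12.5")); // 12.5
    }
}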
Version conflicts are only included if the request sets abortOnVersionConflict to true (the default). */ - public List getIndexingFailures() { - return indexingFailures; + public List getBulkFailures() { + return bulkFailures; } /** * All search failures. */ - public List getSearchFailures() { + public List getSearchFailures() { return searchFailures; } @@ -139,14 +135,8 @@ public class BulkIndexByScrollResponse extends ActionResponse implements ToXCont super.writeTo(out); took.writeTo(out); status.writeTo(out); - out.writeVInt(indexingFailures.size()); - for (Failure failure: indexingFailures) { - failure.writeTo(out); - } - out.writeVInt(searchFailures.size()); - for (ShardSearchFailure failure: searchFailures) { - failure.writeTo(out); - } + out.writeList(bulkFailures); + out.writeList(searchFailures); out.writeBoolean(timedOut); } @@ -155,19 +145,9 @@ public class BulkIndexByScrollResponse extends ActionResponse implements ToXCont super.readFrom(in); took = new TimeValue(in); status = new BulkByScrollTask.Status(in); - int indexingFailuresCount = in.readVInt(); - List indexingFailures = new ArrayList<>(indexingFailuresCount); - for (int i = 0; i < indexingFailuresCount; i++) { - indexingFailures.add(new Failure(in)); - } - this.indexingFailures = unmodifiableList(indexingFailures); - int searchFailuresCount = in.readVInt(); - List searchFailures = new ArrayList<>(searchFailuresCount); - for (int i = 0; i < searchFailuresCount; i++) { - searchFailures.add(readShardSearchFailure(in)); - } - this.searchFailures = unmodifiableList(searchFailures); - this.timedOut = in.readBoolean(); + bulkFailures = in.readList(Failure::new); + searchFailures = in.readList(SearchFailure::new); + timedOut = in.readBoolean(); } @Override @@ -176,15 +156,13 @@ public class BulkIndexByScrollResponse extends ActionResponse implements ToXCont builder.field("timed_out", timedOut); status.innerXContent(builder, params); builder.startArray("failures"); - for (Failure failure: indexingFailures) { + for (Failure failure: bulkFailures) { builder.startObject(); failure.toXContent(builder, params); builder.endObject(); } - for (ShardSearchFailure failure: searchFailures) { - builder.startObject(); + for (SearchFailure failure: searchFailures) { failure.toXContent(builder, params); - builder.endObject(); } builder.endArray(); return builder; @@ -197,7 +175,7 @@ public class BulkIndexByScrollResponse extends ActionResponse implements ToXCont builder.append("took=").append(took).append(','); builder.append("timed_out=").append(timedOut).append(','); status.innerToString(builder); - builder.append(",indexing_failures=").append(getIndexingFailures().subList(0, min(3, getIndexingFailures().size()))); + builder.append(",bulk_failures=").append(getBulkFailures().subList(0, min(3, getBulkFailures().size()))); builder.append(",search_failures=").append(getSearchFailures().subList(0, min(3, getSearchFailures().size()))); return builder.append(']').toString(); } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseContentListener.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseContentListener.java index 72bf6957e12..cc4096f9421 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseContentListener.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseContentListener.java @@ -21,9 +21,9 @@ package org.elasticsearch.index.reindex; import 
org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.bulk.BulkItemResponse.Failure; -import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.reindex.ScrollableHitSource.SearchFailure; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestResponse; @@ -35,7 +35,7 @@ import java.util.Map; /** * RestBuilderListener that returns higher than 200 status if there are any failures and allows to set XContent.Params. */ -public class BulkIndexByScrollResponseContentListener extends RestBuilderListener { +public class BulkIndexByScrollResponseContentListener extends RestBuilderListener { private final Map params; @@ -45,14 +45,14 @@ public class BulkIndexByScrollResponseContentListener status.getStatus()) { status = failure.getStatus(); } } - for (ShardSearchFailure failure: response.getSearchFailures()) { - RestStatus failureStatus = ExceptionsHelper.status(failure.getCause()); + for (SearchFailure failure: response.getSearchFailures()) { + RestStatus failureStatus = ExceptionsHelper.status(failure.getReason()); if (failureStatus.getStatus() > status.getStatus()) { status = failureStatus; } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java new file mode 100644 index 00000000000..5e694e2cf26 --- /dev/null +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java @@ -0,0 +1,251 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.reindex; + +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.bulk.BackoffPolicy; +import org.elasticsearch.action.search.ClearScrollRequest; +import org.elasticsearch.action.search.ClearScrollResponse; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.SearchScrollRequest; +import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.client.Client; +import org.elasticsearch.client.ParentTaskAssigningClient; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.index.mapper.internal.ParentFieldMapper; +import org.elasticsearch.index.mapper.internal.RoutingFieldMapper; +import org.elasticsearch.index.mapper.internal.TTLFieldMapper; +import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHitField; +import org.elasticsearch.threadpool.ThreadPool; + +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.function.Consumer; + +import static java.util.Collections.emptyList; +import static java.util.Collections.unmodifiableList; +import static org.elasticsearch.common.unit.TimeValue.timeValueNanos; +import static org.elasticsearch.common.util.CollectionUtils.isEmpty; + +/** + * A scrollable source of hits from a {@linkplain Client} instance. + */ +public class ClientScrollableHitSource extends ScrollableHitSource { + private final ParentTaskAssigningClient client; + private final SearchRequest firstSearchRequest; + + public ClientScrollableHitSource(ESLogger logger, BackoffPolicy backoffPolicy, ThreadPool threadPool, Runnable countSearchRetry, + Consumer<Exception> fail, ParentTaskAssigningClient client, SearchRequest firstSearchRequest) { + super(logger, backoffPolicy, threadPool, countSearchRetry, fail); + this.client = client; + this.firstSearchRequest = firstSearchRequest; + } + + @Override + public void doStart(Consumer<? super Response> onResponse) { + if (logger.isDebugEnabled()) { + logger.debug("executing initial scroll against {}{}", + isEmpty(firstSearchRequest.indices()) ? "all indices" : firstSearchRequest.indices(), + isEmpty(firstSearchRequest.types()) ? "" : firstSearchRequest.types()); + } + searchWithRetry(listener -> client.search(firstSearchRequest, listener), r -> consume(r, onResponse)); + } + + @Override + protected void doStartNextScroll(String scrollId, TimeValue extraKeepAlive, Consumer<? super Response> onResponse) { + SearchScrollRequest request = new SearchScrollRequest(); + // Add the wait time into the scroll timeout so it won't timeout while we wait for throttling + request.scrollId(scrollId).scroll(timeValueNanos(firstSearchRequest.scroll().keepAlive().nanos() + extraKeepAlive.nanos())); + searchWithRetry(listener -> client.searchScroll(request, listener), r -> consume(r, onResponse)); + } + + @Override + public void clearScroll(String scrollId) { + /* + * Fire off the clear scroll but don't wait for it to return before + * we send the user their response.
+ */ + ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); + clearScrollRequest.addScrollId(scrollId); + /* + * Unwrap the client so we don't set our task as the parent. If we *did* set our ID then the clear scroll would be cancelled as + * if this task is cancelled. But we want to clear the scroll regardless of whether or not the main request was cancelled. + */ + client.unwrap().clearScroll(clearScrollRequest, new ActionListener<ClearScrollResponse>() { + @Override + public void onResponse(ClearScrollResponse response) { + logger.debug("Freed [{}] contexts", response.getNumFreed()); + } + + @Override + public void onFailure(Exception e) { + logger.warn("Failed to clear scroll [{}]", e, scrollId); + } + }); + } + + /** + * Run a search action and call onResponse when the response comes in, retrying if the action fails with an exception caused by + * rejected execution. + * + * @param action consumes a listener and starts the action. The listener it consumes is rigged to retry on failure. + * @param onResponse consumes the response from the action + */ + private void searchWithRetry(Consumer<ActionListener<SearchResponse>> action, Consumer<SearchResponse> onResponse) { + /* + * RetryHelper is both an AbstractRunnable and an ActionListener - meaning that it both starts the search and + * reacts to the results. The complexity is all in onFailure which either adapts the failure to the "fail" listener or + * retries the search. Since both AbstractRunnable and ActionListener define the onFailure method it is called for either failure + * to run the action (either while running or before starting) and for failure on the response from the action. + */ + class RetryHelper extends AbstractRunnable implements ActionListener<SearchResponse> { + private final Iterator<TimeValue> retries = backoffPolicy.iterator(); + private volatile int retryCount = 0; + + @Override + protected void doRun() throws Exception { + action.accept(this); + } + + @Override + public void onResponse(SearchResponse response) { + onResponse.accept(response); + } + + @Override + public void onFailure(Exception e) { + if (ExceptionsHelper.unwrap(e, EsRejectedExecutionException.class) != null) { + if (retries.hasNext()) { + retryCount += 1; + TimeValue delay = retries.next(); + logger.trace("retrying rejected search after [{}]", e, delay); + countSearchRetry.run(); + threadPool.schedule(delay, ThreadPool.Names.SAME, this); + } else { + logger.warn("giving up on search because we retried [{}] times without success", e, retryCount); + fail.accept(e); + } + } else { + logger.warn("giving up on search because it failed with a non-retryable exception", e); + fail.accept(e); + } + } + } + new RetryHelper().run(); + } + + private void consume(SearchResponse response, Consumer<? super Response> onResponse) { + onResponse.accept(wrap(response)); + } + + private Response wrap(SearchResponse response) { + List<SearchFailure> failures; + if (response.getShardFailures() == null) { + failures = emptyList(); + } else { + failures = new ArrayList<>(response.getShardFailures().length); + for (ShardSearchFailure failure: response.getShardFailures()) { + String nodeId = failure.shard() == null ?
null : failure.shard().nodeId(); + failures.add(new SearchFailure(failure.getCause(), failure.index(), failure.shardId(), nodeId)); + } + } + List hits; + if (response.getHits().getHits() == null || response.getHits().getHits().length == 0) { + hits = emptyList(); + } else { + hits = new ArrayList<>(response.getHits().getHits().length); + for (SearchHit hit: response.getHits().getHits()) { + hits.add(new ClientHit(hit)); + } + hits = unmodifiableList(hits); + } + return new Response(response.isTimedOut(), failures, response.getHits().getTotalHits(), + hits, response.getScrollId()); + } + + private static class ClientHit implements Hit { + private final SearchHit delegate; + private final BytesReference source; + + public ClientHit(SearchHit delegate) { + this.delegate = delegate; + source = delegate.hasSource() ? null : delegate.getSourceRef(); + } + + @Override + public String getIndex() { + return delegate.getIndex(); + } + + @Override + public String getType() { + return delegate.getType(); + } + + @Override + public String getId() { + return delegate.getId(); + } + + @Override + public BytesReference getSource() { + return source; + } + + @Override + public long getVersion() { + return delegate.getVersion(); + } + + @Override + public String getParent() { + return fieldValue(ParentFieldMapper.NAME); + } + + @Override + public String getRouting() { + return fieldValue(RoutingFieldMapper.NAME); + } + + @Override + public Long getTimestamp() { + return fieldValue(TimestampFieldMapper.NAME); + } + + @Override + public Long getTTL() { + return fieldValue(TTLFieldMapper.NAME); + } + + private T fieldValue(String fieldName) { + SearchHitField field = delegate.field(fieldName); + return field == null ? null : field.value(); + } + } +} diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexPlugin.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexPlugin.java index 5a7b81a7124..4f2cb2578ac 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexPlugin.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexPlugin.java @@ -23,12 +23,15 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.plugins.ActionPlugin; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestHandler; import java.util.Arrays; import java.util.List; +import static java.util.Collections.singletonList; + public class ReindexPlugin extends Plugin implements ActionPlugin { public static final String NAME = "reindex"; @@ -49,4 +52,9 @@ public class ReindexPlugin extends Plugin implements ActionPlugin { public void onModule(NetworkModule networkModule) { networkModule.registerTaskStatus(BulkByScrollTask.Status.NAME, BulkByScrollTask.Status::new); } + + @Override + public List> getSettings() { + return singletonList(TransportReindexAction.REMOTE_CLUSTER_WHITELIST); + } } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java index 660815bbf52..8c11cd3430f 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java @@ -27,11 +27,13 @@ import org.elasticsearch.action.search.SearchRequest; import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.uid.Versions; +import org.elasticsearch.index.reindex.remote.RemoteInfo; import java.io.IOException; import java.util.Arrays; import java.util.List; +import static java.util.Collections.singletonList; import static java.util.Collections.unmodifiableList; import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.index.VersionType.INTERNAL; @@ -48,6 +50,8 @@ public class ReindexRequest extends AbstractBulkIndexByScrollRequest subRequests() { assert getSearchRequest() != null; assert getDestination() != null; + if (remoteInfo != null) { + return singletonList(getDestination()); + } return unmodifiableList(Arrays.asList(getSearchRequest(), getDestination())); } } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexRequestBuilder.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexRequestBuilder.java index 4f814dbc49d..1eadf2c15bc 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexRequestBuilder.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexRequestBuilder.java @@ -25,6 +25,7 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchAction; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.index.reindex.remote.RemoteInfo; public class ReindexRequestBuilder extends AbstractBulkIndexByScrollRequestBuilder { @@ -67,4 +68,12 @@ public class ReindexRequestBuilder extends destination.setIndex(index).setType(type); return this; } + + /** + * Setup reindexing from a remote cluster. 
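+ * <p>
+ * A rough usage sketch; the host, port, credentials, and index names below are illustrative, and {@code query} is the
+ * remote search query as JSON bytes:
+ * <pre>
+ * new ReindexRequestBuilder(client, ReindexAction.INSTANCE)
+ *     .source("src")
+ *     .destination("dest")
+ *     .setRemoteInfo(new RemoteInfo("http", "otherhost", 9200, query, "user", "pass"))
+ *     .get();
+ * </pre>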
+ */ + public ReindexRequestBuilder setRemoteInfo(RemoteInfo remoteInfo) { + request().setRemoteInfo(remoteInfo); + return this; + } } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestDeleteByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestDeleteByQueryAction.java index bb894584c8b..704bcf2dbaf 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestDeleteByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestDeleteByQueryAction.java @@ -21,7 +21,7 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.client.Client; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -39,23 +39,23 @@ import java.util.function.Consumer; import static org.elasticsearch.rest.RestRequest.Method.POST; -public class RestDeleteByQueryAction extends AbstractBulkByQueryRestHandler { +public class RestDeleteByQueryAction extends AbstractBulkByQueryRestHandler { @Inject - public RestDeleteByQueryAction(Settings settings, RestController controller, Client client, + public RestDeleteByQueryAction(Settings settings, RestController controller, IndicesQueriesRegistry indicesQueriesRegistry, AggregatorParsers aggParsers, Suggesters suggesters, - ClusterService clusterService, TransportDeleteByQueryAction action) { - super(settings, client, indicesQueriesRegistry, aggParsers, suggesters, clusterService, action); + ClusterService clusterService) { + super(settings, indicesQueriesRegistry, aggParsers, suggesters, clusterService, DeleteByQueryAction.INSTANCE); controller.registerHandler(POST, "/{index}/_delete_by_query", this); controller.registerHandler(POST, "/{index}/{type}/_delete_by_query", this); } @Override - protected void handleRequest(RestRequest request, RestChannel channel, Client client) throws Exception { + public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { if (false == request.hasContent()) { throw new ElasticsearchException("_delete_by_query requires a request body"); } - handleRequest(request, channel, false, false); + handleRequest(request, channel, client, false, false); } @Override diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java index 22fcd390430..d40c1ea6622 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java @@ -22,21 +22,26 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.client.Client; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParseFieldMatcherSupplier; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.inject.Inject; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser.ValueType; +import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.reindex.remote.RemoteInfo; import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; @@ -48,36 +53,39 @@ import org.elasticsearch.search.suggest.Suggesters; import java.io.IOException; import java.util.List; import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import static java.util.Objects.requireNonNull; import static org.elasticsearch.common.unit.TimeValue.parseTimeValue; +import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.rest.RestRequest.Method.POST; /** - * Expose IndexBySearchRequest over rest. + * Expose reindex over rest. */ -public class RestReindexAction extends AbstractBaseReindexRestHandler { - - private static final ObjectParser PARSER = new ObjectParser<>("reindex"); +public class RestReindexAction extends AbstractBaseReindexRestHandler { + static final ObjectParser PARSER = new ObjectParser<>("reindex"); + private static final Pattern HOST_PATTERN = Pattern.compile("(?[^:]+)://(?[^:]+):(?\\d+)"); static { - ObjectParser.Parser sourceParser = (parser, search, context) -> { - /* - * Extract the parameters that we need from the source sent to the parser. We could do away with this hack when search source - * has an ObjectParser. - */ + ObjectParser.Parser sourceParser = (parser, request, context) -> { + // Funky hack to work around Search not having a proper ObjectParser and us wanting to extract query if using remote. 
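+ // Read the whole source object into a map, pull out the keys reindex handles itself (index, type, remote), then
+ // re-serialize whatever remains and feed it through the regular search source parser below.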
Map source = parser.map(); String[] indices = extractStringArray(source, "index"); if (indices != null) { - search.indices(indices); + request.getSearchRequest().indices(indices); } String[] types = extractStringArray(source, "type"); if (types != null) { - search.types(types); + request.getSearchRequest().types(types); } + request.setRemoteInfo(buildRemoteInfo(source)); XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType()); builder.map(source); try (XContentParser innerParser = parser.contentType().xContent().createParser(builder.bytes())) { - search.source().parseXContent(context.queryParseContext(innerParser), context.aggParsers, context.suggesters); + request.getSearchRequest().source().parseXContent(context.queryParseContext(innerParser), context.aggParsers, + context.suggesters); } }; @@ -94,7 +102,7 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler i.ttl(parseTimeValue(ttl, TimeValue.timeValueMillis(-1), "ttl").millis()), new ParseField("ttl")); - PARSER.declareField((p, v, c) -> sourceParser.parse(p, v.getSearchRequest(), c), new ParseField("source"), ValueType.OBJECT); + PARSER.declareField((p, v, c) -> sourceParser.parse(p, v, c), new ParseField("source"), ValueType.OBJECT); PARSER.declareField((p, v, c) -> destParser.parse(p, v.getDestination(), c), new ParseField("dest"), ValueType.OBJECT); PARSER.declareInt(ReindexRequest::setSize, new ParseField("size")); PARSER.declareField((p, v, c) -> v.setScript(Script.parse(p, c.getParseFieldMatcher())), new ParseField("script"), @@ -103,19 +111,19 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler source) throws IOException { + @SuppressWarnings("unchecked") + Map remote = (Map) source.remove("remote"); + if (remote == null) { + return null; + } + String username = extractString(remote, "username"); + String password = extractString(remote, "password"); + String hostInRequest = requireNonNull(extractString(remote, "host"), "[host] must be specified to reindex from a remote cluster"); + Matcher hostMatcher = HOST_PATTERN.matcher(hostInRequest); + if (false == hostMatcher.matches()) { + throw new IllegalArgumentException("[host] must be of the form [scheme]://[host]:[port] but was [" + hostInRequest + "]"); + } + String scheme = hostMatcher.group("scheme"); + String host = hostMatcher.group("host"); + int port = Integer.parseInt(hostMatcher.group("port")); + if (false == remote.isEmpty()) { + throw new IllegalArgumentException( + "Unsupported fields in [remote]: [" + Strings.collectionToCommaDelimitedString(remote.keySet()) + "]"); + } + return new RemoteInfo(scheme, host, port, queryForRemote(source), username, password); + } + /** * Yank a string array from a map. Emulates XContent's permissive String to * String array conversions. 
@@ -147,7 +178,32 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler source, String name) { + Object value = source.remove(name); + if (value == null) { + return null; + } + if (value instanceof String) { + return (String) value; + } + throw new IllegalArgumentException("Expected [" + name + "] to be a string but was [" + value + "]"); + } + + private static BytesReference queryForRemote(Map source) throws IOException { + XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint(); + Object query = source.remove("query"); + if (query == null) { + return matchAllQuery().toXContent(builder, ToXContent.EMPTY_PARAMS).bytes(); + } + if (!(query instanceof Map)) { + throw new IllegalArgumentException("Expected [query] to be an object but was [" + query + "]"); + } + @SuppressWarnings("unchecked") + Map map = (Map) query; + return builder.map(map).bytes(); + } + + static class ReindexParseContext implements ParseFieldMatcherSupplier { private final IndicesQueriesRegistry indicesQueryRegistry; private final ParseFieldMatcher parseFieldMatcher; private final AggregatorParsers aggParsers; diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestRethrottleAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestRethrottleAction.java index cb53509a461..f35225452bd 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestRethrottleAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestRethrottleAction.java @@ -21,7 +21,7 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; -import org.elasticsearch.client.Client; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -39,8 +39,8 @@ public class RestRethrottleAction extends BaseRestHandler { private final ClusterService clusterService; @Inject - public RestRethrottleAction(Settings settings, RestController controller, Client client, ClusterService clusterService) { - super(settings, client); + public RestRethrottleAction(Settings settings, RestController controller, ClusterService clusterService) { + super(settings); this.clusterService = clusterService; controller.registerHandler(POST, "/_update_by_query/{taskId}/_rethrottle", this); controller.registerHandler(POST, "/_delete_by_query/{taskId}/_rethrottle", this); @@ -48,7 +48,7 @@ public class RestRethrottleAction extends BaseRestHandler { } @Override - public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) { + public void handleRequest(final RestRequest request, final RestChannel channel, final NodeClient client) { RethrottleRequest internalRequest = new RethrottleRequest(); internalRequest.setTaskId(new TaskId(request.param("taskId"))); Float requestsPerSecond = AbstractBaseReindexRestHandler.parseRequestsPerSecond(request); diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java index 8eb0e30fe38..a858986ccab 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java @@ -19,9 +19,11 
@@ package org.elasticsearch.index.reindex; +import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.client.Client; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.indices.query.IndicesQueriesRegistry; @@ -29,30 +31,33 @@ import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.aggregations.AggregatorParsers; import org.elasticsearch.search.suggest.Suggesters; import java.io.IOException; import java.util.HashMap; +import java.util.Iterator; import java.util.Map; import java.util.function.Consumer; import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.script.Script.ScriptField; -public class RestUpdateByQueryAction extends AbstractBulkByQueryRestHandler { +public class RestUpdateByQueryAction extends AbstractBulkByQueryRestHandler { @Inject - public RestUpdateByQueryAction(Settings settings, RestController controller, Client client, + public RestUpdateByQueryAction(Settings settings, RestController controller, IndicesQueriesRegistry indicesQueriesRegistry, AggregatorParsers aggParsers, Suggesters suggesters, - ClusterService clusterService, TransportUpdateByQueryAction action) { - super(settings, client, indicesQueriesRegistry, aggParsers, suggesters, clusterService, action); + ClusterService clusterService) { + super(settings, indicesQueriesRegistry, aggParsers, suggesters, clusterService, UpdateByQueryAction.INSTANCE); controller.registerHandler(POST, "/{index}/_update_by_query", this); controller.registerHandler(POST, "/{index}/{type}/_update_by_query", this); } @Override - protected void handleRequest(RestRequest request, RestChannel channel, Client client) throws Exception { - handleRequest(request, channel, false, true); + public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { + handleRequest(request, channel, client, false, true); } @Override @@ -67,11 +72,65 @@ public class RestUpdateByQueryAction extends AbstractBulkByQueryRestHandler> consumers = new HashMap<>(); consumers.put("conflicts", o -> internal.setConflicts((String) o)); - consumers.put("script", o -> internal.setScript(Script.parse((Map)o, false, parseFieldMatcher))); + consumers.put("script", o -> internal.setScript(parseScript((Map)o, parseFieldMatcher))); parseInternalRequest(internal, request, consumers); internal.setPipeline(request.param("pipeline")); return internal; } + + @SuppressWarnings("unchecked") + static Script parseScript(Map config, ParseFieldMatcher parseFieldMatcher) { + String script = null; + ScriptService.ScriptType type = null; + String lang = null; + Map params = null; + for (Iterator> itr = config.entrySet().iterator(); itr.hasNext();) { + Map.Entry entry = itr.next(); + String parameterName = entry.getKey(); + Object parameterValue = entry.getValue(); + if (parseFieldMatcher.match(parameterName, ScriptField.LANG)) { + if (parameterValue instanceof String || parameterValue == null) { + lang = (String) parameterValue; + } else { + throw new ElasticsearchParseException("Value must be of type String: [" + 
parameterName + "]"); + } + } else if (parseFieldMatcher.match(parameterName, ScriptField.PARAMS)) { + if (parameterValue instanceof Map || parameterValue == null) { + params = (Map) parameterValue; + } else { + throw new ElasticsearchParseException("Value must be of type String: [" + parameterName + "]"); + } + } else if (parseFieldMatcher.match(parameterName, ScriptService.ScriptType.INLINE.getParseField())) { + if (parameterValue instanceof String || parameterValue == null) { + script = (String) parameterValue; + type = ScriptService.ScriptType.INLINE; + } else { + throw new ElasticsearchParseException("Value must be of type String: [" + parameterName + "]"); + } + } else if (parseFieldMatcher.match(parameterName, ScriptService.ScriptType.FILE.getParseField())) { + if (parameterValue instanceof String || parameterValue == null) { + script = (String) parameterValue; + type = ScriptService.ScriptType.FILE; + } else { + throw new ElasticsearchParseException("Value must be of type String: [" + parameterName + "]"); + } + } else if (parseFieldMatcher.match(parameterName, ScriptService.ScriptType.STORED.getParseField())) { + if (parameterValue instanceof String || parameterValue == null) { + script = (String) parameterValue; + type = ScriptService.ScriptType.STORED; + } else { + throw new ElasticsearchParseException("Value must be of type String: [" + parameterName + "]"); + } + } + } + if (script == null) { + throw new ElasticsearchParseException("expected one of [{}], [{}] or [{}] fields, but found none", + ScriptService.ScriptType.INLINE.getParseField().getPreferredName(), ScriptService.ScriptType.FILE.getParseField() + .getPreferredName(), ScriptService.ScriptType.STORED.getParseField().getPreferredName()); + } + assert type != null : "if script is not null, type should definitely not be null"; + return new Script(script, type, lang, params); + } } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ScrollableHitSource.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ScrollableHitSource.java new file mode 100644 index 00000000000..b03496df7a7 --- /dev/null +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ScrollableHitSource.java @@ -0,0 +1,357 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.reindex; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.bulk.BackoffPolicy; +import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.reindex.remote.RemoteScrollableHitSource; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.threadpool.ThreadPool; + +import java.io.Closeable; +import java.io.IOException; +import java.util.List; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Consumer; + +import static java.util.Objects.requireNonNull; + +/** + * A scrollable source of results. + */ +public abstract class ScrollableHitSource implements Closeable { + private final AtomicReference scrollId = new AtomicReference<>(); + + protected final ESLogger logger; + protected final BackoffPolicy backoffPolicy; + protected final ThreadPool threadPool; + protected final Runnable countSearchRetry; + protected final Consumer fail; + + public ScrollableHitSource(ESLogger logger, BackoffPolicy backoffPolicy, ThreadPool threadPool, Runnable countSearchRetry, + Consumer fail) { + this.logger = logger; + this.backoffPolicy = backoffPolicy; + this.threadPool = threadPool; + this.countSearchRetry = countSearchRetry; + this.fail = fail; + } + + public final void start(Consumer onResponse) { + doStart(response -> { + setScroll(response.getScrollId()); + logger.debug("scroll returned [{}] documents with a scroll id of [{}]", response.getHits().size(), response.getScrollId()); + onResponse.accept(response); + }); + } + protected abstract void doStart(Consumer onResponse); + + public final void startNextScroll(TimeValue extraKeepAlive, Consumer onResponse) { + doStartNextScroll(scrollId.get(), extraKeepAlive, response -> { + setScroll(response.getScrollId()); + onResponse.accept(response); + }); + } + protected abstract void doStartNextScroll(String scrollId, TimeValue extraKeepAlive, Consumer onResponse); + + @Override + public void close() { + String scrollId = this.scrollId.get(); + if (Strings.hasLength(scrollId)) { + clearScroll(scrollId); + } + } + protected abstract void clearScroll(String scrollId); + + /** + * Set the id of the last scroll. Used for debugging. + */ + final void setScroll(String scrollId) { + this.scrollId.set(scrollId); + } + + /** + * Response from each scroll batch. + */ + public static class Response { + private final boolean timedOut; + private final List failures; + private final long totalHits; + private final List hits; + private final String scrollId; + + public Response(boolean timedOut, List failures, long totalHits, List hits, String scrollId) { + this.timedOut = timedOut; + this.failures = failures; + this.totalHits = totalHits; + this.hits = hits; + this.scrollId = scrollId; + } + + /** + * Did this batch time out? + */ + public boolean isTimedOut() { + return timedOut; + } + + /** + * Were there any search failures? 
+ */ + public final List getFailures() { + return failures; + } + + /** + * What was the total number of documents matching the search? + */ + public long getTotalHits() { + return totalHits; + } + + /** + * The documents returned in this batch. + */ + public List getHits() { + return hits; + } + + /** + * The scroll id used to fetch the next set of documents. + */ + public String getScrollId() { + return scrollId; + } + } + + /** + * A document returned as part of the response. Think of it like {@link SearchHit} but with all the things reindex needs in convenient + * methods. + */ + public interface Hit { + String getIndex(); + String getType(); + String getId(); + long getVersion(); + /** + * The source of the hit. Returns null if the source didn't come back from the search, usually because the source wasn't stored at + * all. + */ + @Nullable BytesReference getSource(); + @Nullable String getParent(); + @Nullable String getRouting(); + @Nullable Long getTimestamp(); + @Nullable Long getTTL(); + } + + /** + * An implementation of {@linkplain Hit} that uses getters and setters. Primarily used for testing and {@link RemoteScrollableHitSource}. + */ + public static class BasicHit implements Hit { + private final String index; + private final String type; + private final String id; + private final long version; + + private BytesReference source; + private String parent; + private String routing; + private Long timestamp; + private Long ttl; + + public BasicHit(String index, String type, String id, long version) { + this.index = index; + this.type = type; + this.id = id; + this.version = version; + } + + @Override + public String getIndex() { + return index; + } + + @Override + public String getType() { + return type; + } + + @Override + public String getId() { + return id; + } + + @Override + public long getVersion() { + return version; + } + + @Override + public BytesReference getSource() { + return source; + } + + public BasicHit setSource(BytesReference source) { + this.source = source; + return this; + } + + @Override + public String getParent() { + return parent; + } + + public BasicHit setParent(String parent) { + this.parent = parent; + return this; + } + + @Override + public String getRouting() { + return routing; + } + + public BasicHit setRouting(String routing) { + this.routing = routing; + return this; + } + + @Override + public Long getTimestamp() { + return timestamp; + } + + public BasicHit setTimestamp(Long timestamp) { + this.timestamp = timestamp; + return this; + } + + @Override + public Long getTTL() { + return ttl; + } + + public BasicHit setTTL(Long ttl) { + this.ttl = ttl; + return this; + } + } + + /** + * A failure during search. Like {@link ShardSearchFailure} but useful for reindex from remote as well. + */ + public static class SearchFailure implements Writeable, ToXContent { + private final Throwable reason; + @Nullable + private final String index; + @Nullable + private final Integer shardId; + @Nullable + private final String nodeId; + + public SearchFailure(Throwable reason, @Nullable String index, @Nullable Integer shardId, @Nullable String nodeId) { + this.index = index; + this.shardId = shardId; + this.reason = requireNonNull(reason, "reason cannot be null"); + this.nodeId = nodeId; + } + + /** + * Build a search failure that doesn't have shard information available. + */ + public SearchFailure(Throwable reason) { + this(reason, null, null, null); + } + + /** + * Read from a stream. 
+ */ + public SearchFailure(StreamInput in) throws IOException { + reason = in.readException(); + index = in.readOptionalString(); + shardId = in.readOptionalVInt(); + nodeId = in.readOptionalString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeException(reason); + out.writeOptionalString(index); + out.writeOptionalVInt(shardId); + out.writeOptionalString(nodeId); + } + + public String getIndex() { + return index; + } + + public Integer getShardId() { + return shardId; + } + + public Throwable getReason() { + return reason; + } + + @Nullable + public String getNodeId() { + return nodeId; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (index != null) { + builder.field("index", index); + } + if (shardId != null) { + builder.field("shard", shardId); + } + if (nodeId != null) { + builder.field("node", nodeId); + } + builder.field("reason"); + { + builder.startObject(); + ElasticsearchException.toXContent(builder, params, reason); + builder.endObject(); + } + builder.endObject(); + return builder; + } + + @Override + public String toString() { + return Strings.toString(this); + } + } +} diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java index 471bd066f94..c3847ab2125 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java @@ -31,10 +31,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.mapper.internal.ParentFieldMapper; -import org.elasticsearch.index.mapper.internal.RoutingFieldMapper; import org.elasticsearch.script.ScriptService; -import org.elasticsearch.search.SearchHit; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -74,35 +71,35 @@ public class TransportDeleteByQueryAction extends HandledTransportAction listener, ScriptService scriptService, ClusterState clusterState) { - super(task, logger, client, threadPool, request, request.getSearchRequest(), listener, scriptService, clusterState); + super(task, logger, client, threadPool, request, listener, scriptService, clusterState); } @Override - protected boolean accept(SearchHit doc) { + protected boolean accept(ScrollableHitSource.Hit doc) { // Delete-by-query does not require the source to delete a document // and the default implementation checks for it return true; } @Override - protected RequestWrapper buildRequest(SearchHit doc) { + protected RequestWrapper buildRequest(ScrollableHitSource.Hit doc) { DeleteRequest delete = new DeleteRequest(); - delete.index(doc.index()); - delete.type(doc.type()); - delete.id(doc.id()); - delete.version(doc.version()); + delete.index(doc.getIndex()); + delete.type(doc.getType()); + delete.id(doc.getId()); + delete.version(doc.getVersion()); return wrap(delete); } /** - * Overrides the parent {@link AbstractAsyncBulkIndexByScrollAction#copyMetadata(RequestWrapper, SearchHit)} + * Overrides the parent {@link AbstractAsyncBulkIndexByScrollAction#copyMetadata(RequestWrapper, 
ScrollableHitSource.Hit)} * method that is much more Update/Reindex oriented and so also copies things like timestamp/ttl which we * don't care about for a deletion. */ @Override - protected RequestWrapper copyMetadata(RequestWrapper request, SearchHit doc) { - copyParent(request, fieldValue(doc, ParentFieldMapper.NAME)); - copyRouting(request, fieldValue(doc, RoutingFieldMapper.NAME)); + protected RequestWrapper copyMetadata(RequestWrapper request, ScrollableHitSource.Hit doc) { + request.setParent(doc.getParent()); + request.setRouting(doc.getRouting()); return request; } } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java index e98c45595c7..3e6f806d293 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java @@ -19,8 +19,10 @@ package org.elasticsearch.index.reindex; +import org.apache.http.HttpHost; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.ActionFilters; @@ -28,51 +30,74 @@ import org.elasticsearch.action.support.AutoCreateIndex; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.Client; import org.elasticsearch.client.ParentTaskAssigningClient; +import org.elasticsearch.client.RestClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.lucene.uid.Versions; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.http.HttpInfo; +import org.elasticsearch.http.HttpServer; import org.elasticsearch.index.mapper.internal.TTLFieldMapper; import org.elasticsearch.index.mapper.internal.VersionFieldMapper; +import org.elasticsearch.index.reindex.remote.RemoteInfo; +import org.elasticsearch.index.reindex.remote.RemoteScrollableHitSource; +import org.elasticsearch.node.service.NodeService; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; -import org.elasticsearch.search.SearchHit; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Set; import java.util.function.BiFunction; +import java.util.function.Function; +import static java.util.Collections.emptyList; import static java.util.Objects.requireNonNull; import static org.elasticsearch.index.VersionType.INTERNAL; public class TransportReindexAction extends HandledTransportAction { + public static final Setting> REMOTE_CLUSTER_WHITELIST = + Setting.listSetting("reindex.remote.whitelist", emptyList(), Function.identity(), Property.NodeScope); + 
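+ // To reindex from a remote cluster a node must whitelist the remote's host:port pair in elasticsearch.yml, for example
+ // reindex.remote.whitelist: otherhost:9200 (hypothetical host). The setting is node-scoped and intentionally not dynamic.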
private final ClusterService clusterService; private final ScriptService scriptService; private final AutoCreateIndex autoCreateIndex; private final Client client; + private final Set remoteWhitelist; + private final HttpServer httpServer; @Inject public TransportReindexAction(Settings settings, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, ClusterService clusterService, ScriptService scriptService, - AutoCreateIndex autoCreateIndex, Client client, TransportService transportService) { + AutoCreateIndex autoCreateIndex, Client client, TransportService transportService, @Nullable HttpServer httpServer) { super(settings, ReindexAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, ReindexRequest::new); this.clusterService = clusterService; this.scriptService = scriptService; this.autoCreateIndex = autoCreateIndex; this.client = client; + remoteWhitelist = new HashSet<>(REMOTE_CLUSTER_WHITELIST.get(settings)); + this.httpServer = httpServer; } @Override protected void doExecute(Task task, ReindexRequest request, ActionListener listener) { + checkRemoteWhitelist(request.getRemoteInfo()); ClusterState state = clusterService.state(); - validateAgainstAliases(request.getSearchRequest(), request.getDestination(), indexNameExpressionResolver, autoCreateIndex, state); + validateAgainstAliases(request.getSearchRequest(), request.getDestination(), request.getRemoteInfo(), indexNameExpressionResolver, + autoCreateIndex, state); ParentTaskAssigningClient client = new ParentTaskAssigningClient(this.client, clusterService.localNode(), task); new AsyncIndexBySearchAction((BulkByScrollTask) task, logger, client, threadPool, request, listener, scriptService, state).start(); } @@ -82,15 +107,43 @@ public class TransportReindexAction extends HandledTransportAction whitelist, RemoteInfo remoteInfo, TransportAddress publishAddress) { + if (remoteInfo == null) return; + String check = remoteInfo.getHost() + ':' + remoteInfo.getPort(); + if (whitelist.contains(check)) return; + /* + * For testing we support the key "myself" to allow connecting to the local node. We can't just change the setting to include the + * local node because it is intentionally not a dynamic setting for security purposes. We can't use something like "localhost:9200" + * because we don't know up front which port we'll get because the tests bind to port 0. Instead we try to resolve it here, taking + * "myself" to mean "my published http address". + */ + if (whitelist.contains("myself") && publishAddress != null && publishAddress.toString().equals(check)) { + return; + } + throw new IllegalArgumentException('[' + check + "] not whitelisted in " + REMOTE_CLUSTER_WHITELIST.getKey()); + } + /** * Throws an ActionRequestValidationException if the request tries to index * back into the same index or into an index that points to two indexes. * This cannot be done during request validation because the cluster state * isn't available then. Package private for testing. 
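+ * When reindexing from a remote cluster the source indices live on the remote, so there is nothing in the local cluster
+ * state to validate against and this check is skipped.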
*/ - static String validateAgainstAliases(SearchRequest source, IndexRequest destination, + static void validateAgainstAliases(SearchRequest source, IndexRequest destination, RemoteInfo remoteInfo, IndexNameExpressionResolver indexNameExpressionResolver, AutoCreateIndex autoCreateIndex, ClusterState clusterState) { + if (remoteInfo != null) { + return; + } String target = destination.index(); if (false == autoCreateIndex.shouldAutoCreate(target, clusterState)) { /* @@ -107,7 +160,6 @@ public class TransportReindexAction extends HandledTransportAction listener, ScriptService scriptService, ClusterState clusterState) { - super(task, logger, client, threadPool, request, request.getSearchRequest(), listener, scriptService, clusterState); + super(task, logger, client, threadPool, request, listener, scriptService, clusterState); } @Override - protected BiFunction, SearchHit, RequestWrapper> buildScriptApplier() { + protected ScrollableHitSource buildScrollableResultSource(BackoffPolicy backoffPolicy) { + if (mainRequest.getRemoteInfo() != null) { + // NORELEASE track 500-level retries that are builtin to the client + RemoteInfo remoteInfo = mainRequest.getRemoteInfo(); + if (remoteInfo.getUsername() != null) { + // NORELEASE support auth + throw new UnsupportedOperationException("Auth is unsupported"); + } + RestClient restClient = RestClient.builder(new HttpHost(remoteInfo.getHost(), remoteInfo.getPort(), remoteInfo.getScheme())) + .build(); + RemoteScrollableHitSource.AsyncClient client = new RemoteScrollableHitSource.AsynchronizingRestClient(threadPool, + restClient); + return new RemoteScrollableHitSource(logger, backoffPolicy, threadPool, task::countSearchRetry, this::finishHim, client, + remoteInfo.getQuery(), mainRequest.getSearchRequest()); + } + return super.buildScrollableResultSource(backoffPolicy); + } + + @Override + protected BiFunction, ScrollableHitSource.Hit, RequestWrapper> buildScriptApplier() { Script script = mainRequest.getScript(); if (script != null) { return new ReindexScriptApplier(task, scriptService, script, script.getParams()); @@ -134,7 +205,7 @@ public class TransportReindexAction extends HandledTransportAction buildRequest(SearchHit doc) { + protected RequestWrapper buildRequest(ScrollableHitSource.Hit doc) { IndexRequest index = new IndexRequest(); // Copy the index from the request so we always write where it asked to write @@ -142,7 +213,7 @@ public class TransportReindexAction extends HandledTransportAction listener, ScriptService scriptService, ClusterState clusterState) { - super(task, logger, client, threadPool, request, request.getSearchRequest(), listener, scriptService, clusterState); + super(task, logger, client, threadPool, request, listener, scriptService, clusterState); } @Override - protected BiFunction, SearchHit, RequestWrapper> buildScriptApplier() { + protected BiFunction, ScrollableHitSource.Hit, RequestWrapper> buildScriptApplier() { Script script = mainRequest.getScript(); if (script != null) { return new UpdateByQueryScriptApplier(task, scriptService, script, script.getParams()); @@ -98,14 +97,14 @@ public class TransportUpdateByQueryAction extends HandledTransportAction buildRequest(SearchHit doc) { + protected RequestWrapper buildRequest(ScrollableHitSource.Hit doc) { IndexRequest index = new IndexRequest(); - index.index(doc.index()); - index.type(doc.type()); - index.id(doc.id()); - index.source(doc.sourceRef()); + index.index(doc.getIndex()); + index.type(doc.getType()); + index.id(doc.getId()); + index.source(doc.getSource()); 
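+ // Write the document back with internal versioning pinned to the version we read, so a concurrent change to the
+ // document fails the write with a version conflict instead of being silently overwritten.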
index.versionType(VersionType.INTERNAL); - index.version(doc.version()); + index.version(doc.getVersion()); index.setPipeline(mainRequest.getPipeline()); return wrap(index); } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteInfo.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteInfo.java new file mode 100644 index 00000000000..89d6cb18401 --- /dev/null +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteInfo.java @@ -0,0 +1,113 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.reindex.remote; + +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; + +import java.io.IOException; + +import static java.util.Objects.requireNonNull; + +public class RemoteInfo implements Writeable { + private final String scheme; + private final String host; + private final int port; + private final BytesReference query; + private final String username; + private final String password; + + public RemoteInfo(String scheme, String host, int port, BytesReference query, String username, String password) { + this.scheme = requireNonNull(scheme, "[scheme] must be specified to reindex from a remote cluster"); + this.host = requireNonNull(host, "[host] must be specified to reindex from a remote cluster"); + this.port = port; + this.query = requireNonNull(query, "[query] must be specified to reindex from a remote cluster"); + this.username = username; + this.password = password; + } + + /** + * Read from a stream. 
+ */ + public RemoteInfo(StreamInput in) throws IOException { + scheme = in.readString(); + host = in.readString(); + port = in.readVInt(); + query = in.readBytesReference(); + username = in.readOptionalString(); + password = in.readOptionalString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(scheme); + out.writeString(host); + out.writeVInt(port); + out.writeBytesReference(query); + out.writeOptionalString(username); + out.writeOptionalString(password); + } + + public String getScheme() { + return scheme; + } + + public String getHost() { + return host; + } + + public int getPort() { + return port; + } + + public BytesReference getQuery() { + return query; + } + + @Nullable + public String getUsername() { + return username; + } + + @Nullable + public String getPassword() { + return password; + } + + @Override + public String toString() { + StringBuilder b = new StringBuilder(); + if (false == "http".equals(scheme)) { + // http is the default so it isn't worth taking up space if it is the scheme + b.append("scheme=").append(scheme).append(' '); + } + b.append("host=").append(host).append(" port=").append(port).append(" query=").append(query.utf8ToString()); + if (username != null) { + b.append(" username=").append(username); + } + if (password != null) { + b.append(" password=<<>>"); + } + return b.toString(); + } +} diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuilders.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuilders.java new file mode 100644 index 00000000000..00c9f0ae509 --- /dev/null +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuilders.java @@ -0,0 +1,163 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.reindex.remote; + +import org.apache.http.HttpEntity; +import org.apache.http.entity.ByteArrayEntity; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.search.sort.FieldSortBuilder; +import org.elasticsearch.search.sort.SortBuilder; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static java.util.Collections.singletonMap; + +final class RemoteRequestBuilders { + private RemoteRequestBuilders() {} + + static String initialSearchPath(SearchRequest searchRequest) { + // It is nasty to build paths with StringBuilder but we'll be careful.... + StringBuilder path = new StringBuilder("/"); + addIndexesOrTypes(path, "Index", searchRequest.indices()); + addIndexesOrTypes(path, "Type", searchRequest.types()); + path.append("_search"); + return path.toString(); + } + + static Map initialSearchParams(SearchRequest searchRequest, Version remoteVersion) { + Map params = new HashMap<>(); + if (searchRequest.scroll() != null) { + params.put("scroll", searchRequest.scroll().keepAlive().toString()); + } + params.put("size", Integer.toString(searchRequest.source().size())); + if (searchRequest.source().version() == null || searchRequest.source().version() == true) { + // false is the only value that makes it false. Null defaults to true.... + params.put("version", null); + } + if (searchRequest.source().sorts() != null) { + boolean useScan = false; + // Detect if we should use search_type=scan rather than a sort + if (remoteVersion.before(Version.V_2_1_0)) { + for (SortBuilder sort : searchRequest.source().sorts()) { + if (sort instanceof FieldSortBuilder) { + FieldSortBuilder f = (FieldSortBuilder) sort; + if (f.getFieldName().equals(FieldSortBuilder.DOC_FIELD_NAME)) { + useScan = true; + break; + } + } + } + } + if (useScan) { + params.put("search_type", "scan"); + } else { + StringBuilder sorts = new StringBuilder(sortToUri(searchRequest.source().sorts().get(0))); + for (int i = 1; i < searchRequest.source().sorts().size(); i++) { + sorts.append(',').append(sortToUri(searchRequest.source().sorts().get(i))); + } + params.put("sorts", sorts.toString()); + } + } + if (searchRequest.source().storedFields() != null && false == searchRequest.source().storedFields().isEmpty()) { + StringBuilder fields = new StringBuilder(searchRequest.source().storedFields().get(0)); + for (int i = 1; i < searchRequest.source().storedFields().size(); i++) { + fields.append(',').append(searchRequest.source().storedFields().get(i)); + } + String storedFieldsParamName = remoteVersion.before(Version.V_5_0_0_alpha4) ? 
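+ // the stored fields parameter was renamed from [fields] to [stored_fields] in 5.0.0-alpha4, so older remotes need the old name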
"fields" : "stored_fields"; + params.put(storedFieldsParamName, fields.toString()); + } + return params; + } + + static HttpEntity initialSearchEntity(BytesReference query) { + try (XContentBuilder entity = JsonXContent.contentBuilder(); XContentParser queryParser = XContentHelper.createParser(query)) { + entity.startObject(); + entity.field("query"); + /* + * We're intentionally a bit paranoid here - copying the query as xcontent rather than writing a raw field. We don't want poorly + * written queries to escape. Ever. + */ + entity.copyCurrentStructure(queryParser); + XContentParser.Token shouldBeEof = queryParser.nextToken(); + if (shouldBeEof != null) { + throw new ElasticsearchException( + "query was more than a single object. This first token after the object is [" + shouldBeEof + "]"); + } + entity.endObject(); + BytesRef bytes = entity.bytes().toBytesRef(); + return new ByteArrayEntity(bytes.bytes, bytes.offset, bytes.length, ContentType.APPLICATION_JSON); + } catch (IOException e) { + throw new ElasticsearchException("unexpected error building entity", e); + } + } + + private static void addIndexesOrTypes(StringBuilder path, String name, String[] indicesOrTypes) { + if (indicesOrTypes == null || indicesOrTypes.length == 0) { + return; + } + for (String indexOrType : indicesOrTypes) { + checkIndexOrType(name, indexOrType); + } + path.append(Strings.arrayToCommaDelimitedString(indicesOrTypes)).append('/'); + } + + private static void checkIndexOrType(String name, String indexOrType) { + if (indexOrType.indexOf(',') >= 0) { + throw new IllegalArgumentException(name + " containing [,] not supported but got [" + indexOrType + "]"); + } + if (indexOrType.indexOf('/') >= 0) { + throw new IllegalArgumentException(name + " containing [/] not supported but got [" + indexOrType + "]"); + } + } + + private static String sortToUri(SortBuilder sort) { + if (sort instanceof FieldSortBuilder) { + FieldSortBuilder f = (FieldSortBuilder) sort; + return f.getFieldName() + ":" + f.order(); + } + throw new IllegalArgumentException("Unsupported sort [" + sort + "]"); + } + + static String scrollPath() { + return "/_search/scroll"; + } + + static Map scrollParams(TimeValue keepAlive) { + return singletonMap("scroll", keepAlive.toString()); + } + + static HttpEntity scrollEntity(String scroll) { + return new StringEntity(scroll, ContentType.TEXT_PLAIN); + } +} diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteResponseParsers.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteResponseParsers.java new file mode 100644 index 00000000000..0a467593a2c --- /dev/null +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteResponseParsers.java @@ -0,0 +1,301 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.reindex.remote; + +import org.elasticsearch.Version; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.ParseFieldMatcherSupplier; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser.ValueType; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentLocation; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.index.reindex.ScrollableHitSource.BasicHit; +import org.elasticsearch.index.reindex.ScrollableHitSource.Hit; +import org.elasticsearch.index.reindex.ScrollableHitSource.Response; +import org.elasticsearch.index.reindex.ScrollableHitSource.SearchFailure; + +import java.io.IOException; +import java.util.List; +import java.util.function.BiFunction; + +import static java.util.Collections.emptyList; +import static java.util.Collections.singletonList; +import static java.util.Objects.requireNonNull; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + +/** + * Parsers to convert the response from the remote host into objects useful for {@link RemoteScrollableHitSource}. Lots of data is + * intentionally thrown on the floor because we don't need it but ObjectParser and friends are strict about blowing up when they see + * elements they don't understand. So you'll see a lot of BiConsumers that look like "(b, v) -> {}". That means "I don't care about the + * value here, just throw it away and don't blow up." + */ +final class RemoteResponseParsers { + private RemoteResponseParsers() {} + + /** + * Parser for an individual {@code hit} element. + */ + public static final ConstructingObjectParser HIT_PARSER = new ConstructingObjectParser<>("hit", + a -> { + int i = 0; + String index = (String) a[i++]; + String type = (String) a[i++]; + String id = (String) a[i++]; + long version = (long) a[i++]; + return new BasicHit(index, type, id, version); + }); + static { + HIT_PARSER.declareString(constructorArg(), new ParseField("_index")); + HIT_PARSER.declareString(constructorArg(), new ParseField("_type")); + HIT_PARSER.declareString(constructorArg(), new ParseField("_id")); + HIT_PARSER.declareLong(constructorArg(), new ParseField("_version")); + HIT_PARSER.declareObject(BasicHit::setSource, (p, s) -> { + try { + /* + * We spool the data from the remote back into xcontent so we can get bytes to send. There ought to be a better way but for + * now this should do. 
+ */ + try (XContentBuilder b = JsonXContent.contentBuilder()) { + b.copyCurrentStructure(p); + return b.bytes(); + } + } catch (IOException e) { + throw new ParsingException(p.getTokenLocation(), "[hit] failed to parse [_source]", e); + } + }, new ParseField("_source")); + HIT_PARSER.declareString(BasicHit::setRouting, new ParseField("_routing")); + HIT_PARSER.declareString(BasicHit::setParent, new ParseField("_parent")); + HIT_PARSER.declareLong(BasicHit::setTTL, new ParseField("_ttl")); + HIT_PARSER.declareLong(BasicHit::setTimestamp, new ParseField("_timestamp")); + HIT_PARSER.declareField((b, v) -> {}, p -> null, new ParseField("_score"), ValueType.FLOAT_OR_NULL); + HIT_PARSER.declareStringArray((b, v) -> {}, new ParseField("sort")); + } + + /** + * Parser for the {@code hits} element. Parsed to an array of {@code [total (Long), hits (List)]}. + */ + public static final ConstructingObjectParser HITS_PARSER = new ConstructingObjectParser<>("hits", + a -> a); + static { + HITS_PARSER.declareLong(constructorArg(), new ParseField("total")); + HITS_PARSER.declareObjectArray(constructorArg(), HIT_PARSER, new ParseField("hits")); + HITS_PARSER.declareField((b, v) -> {}, p -> null, new ParseField("max_score"), ValueType.FLOAT_OR_NULL); + } + + /** + * Parser for {@code failed} shards in the {@code _shards} elements. + */ + public static final ConstructingObjectParser SEARCH_FAILURE_PARSER = + new ConstructingObjectParser<>("failure", a -> { + int i = 0; + String index = (String) a[i++]; + Integer shardId = (Integer) a[i++]; + String nodeId = (String) a[i++]; + Object reason = a[i++]; + + Throwable reasonThrowable; + if (reason instanceof String) { + reasonThrowable = new RuntimeException("Unknown remote exception with reason=[" + (String) reason + "]"); + } else { + reasonThrowable = (Throwable) reason; + } + return new SearchFailure(reasonThrowable, index, shardId, nodeId); + }); + static { + SEARCH_FAILURE_PARSER.declareString(optionalConstructorArg(), new ParseField("index")); + SEARCH_FAILURE_PARSER.declareInt(optionalConstructorArg(), new ParseField("shard")); + SEARCH_FAILURE_PARSER.declareString(optionalConstructorArg(), new ParseField("node")); + SEARCH_FAILURE_PARSER.declareField(constructorArg(), (p, c) -> { + if (p.currentToken() == XContentParser.Token.START_OBJECT) { + return ThrowableBuilder.PARSER.apply(p, c); + } else { + return p.text(); + } + }, new ParseField("reason"), ValueType.OBJECT_OR_STRING); + SEARCH_FAILURE_PARSER.declareInt((b, v) -> {}, new ParseField("status")); + } + + /** + * Parser for the {@code _shards} element. Throws everything out except the errors array if there is one. If there isn't one then it + * parses to an empty list. + */ + public static final ConstructingObjectParser, ParseFieldMatcherSupplier> SHARDS_PARSER = + new ConstructingObjectParser<>("_shards", a -> { + @SuppressWarnings("unchecked") + List failures = (List) a[0]; + failures = failures == null ? 
emptyList() : failures; + return failures; + }); + static { + SHARDS_PARSER.declareObjectArray(optionalConstructorArg(), SEARCH_FAILURE_PARSER, new ParseField("failures")); + SHARDS_PARSER.declareInt((b, v) -> {}, new ParseField("total")); + SHARDS_PARSER.declareInt((b, v) -> {}, new ParseField("successful")); + SHARDS_PARSER.declareInt((b, v) -> {}, new ParseField("failed")); + } + + public static final ConstructingObjectParser RESPONSE_PARSER = + new ConstructingObjectParser<>("search_response", a -> { + int i = 0; + Throwable catastrophicFailure = (Throwable) a[i++]; + if (catastrophicFailure != null) { + return new Response(false, singletonList(new SearchFailure(catastrophicFailure)), 0, emptyList(), null); + } + boolean timedOut = (boolean) a[i++]; + String scroll = (String) a[i++]; + Object[] hitsElement = (Object[]) a[i++]; + @SuppressWarnings("unchecked") + List failures = (List) a[i++]; + + long totalHits = 0; + List hits = emptyList(); + + // Pull apart the hits element if we got it + if (hitsElement != null) { + i = 0; + totalHits = (long) hitsElement[i++]; + @SuppressWarnings("unchecked") + List h = (List) hitsElement[i++]; + hits = h; + } + + return new Response(timedOut, failures, totalHits, hits, scroll); + }); + static { + RESPONSE_PARSER.declareObject(optionalConstructorArg(), ThrowableBuilder.PARSER, new ParseField("error")); + RESPONSE_PARSER.declareBoolean(optionalConstructorArg(), new ParseField("timed_out")); + RESPONSE_PARSER.declareString(optionalConstructorArg(), new ParseField("_scroll_id")); + RESPONSE_PARSER.declareObject(optionalConstructorArg(), HITS_PARSER, new ParseField("hits")); + RESPONSE_PARSER.declareObject(optionalConstructorArg(), SHARDS_PARSER, new ParseField("_shards")); + RESPONSE_PARSER.declareInt((b, v) -> {}, new ParseField("took")); + RESPONSE_PARSER.declareBoolean((b, v) -> {}, new ParseField("terminated_early")); + RESPONSE_PARSER.declareInt((b, v) -> {}, new ParseField("status")); + } + + /** + * Collects stuff about Throwables and attempts to rebuild them. 
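+ * A hedged example of the "reason" element this rebuilds (values invented): + * {"type":"es_rejected_execution_exception","reason":"rejected execution (queue capacity 1)","caused_by":{"type":"illegal_state_exception","reason":"shutting down"}} + * build() maps it onto the closest local exception it can manage.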
+ */ + public static class ThrowableBuilder { + public static final BiFunction PARSER; + static { + ObjectParser parser = new ObjectParser<>("reason", ThrowableBuilder::new); + PARSER = parser.andThen(ThrowableBuilder::build); + parser.declareString(ThrowableBuilder::setType, new ParseField("type")); + parser.declareString(ThrowableBuilder::setReason, new ParseField("reason")); + parser.declareObject(ThrowableBuilder::setCausedBy, PARSER, new ParseField("caused_by")); + + // So we can give a nice error for parsing exceptions + parser.declareInt(ThrowableBuilder::setLine, new ParseField("line")); + parser.declareInt(ThrowableBuilder::setColumn, new ParseField("col")); + + // So we don't blow up on search exceptions + parser.declareString((b, v) -> {}, new ParseField("phase")); + parser.declareBoolean((b, v) -> {}, new ParseField("grouped")); + parser.declareField((p, v, c) -> p.skipChildren(), new ParseField("failed_shards"), ValueType.OBJECT_ARRAY); + + // Just throw away the root_cause + parser.declareField((p, v, c) -> p.skipChildren(), new ParseField("root_cause"), ValueType.OBJECT_ARRAY); + } + + private String type; + private String reason; + private Integer line; + private Integer column; + private Throwable causedBy; + + public Throwable build() { + Throwable t = buildWithoutCause(); + if (causedBy != null) { + t.initCause(causedBy); + } + return t; + } + + private Throwable buildWithoutCause() { + requireNonNull(type, "[type] is required"); + requireNonNull(reason, "[reason] is required"); + switch (type) { + // Make some effort to use the right exceptions + case "es_rejected_execution_exception": + return new EsRejectedExecutionException(reason); + case "parsing_exception": + XContentLocation location = null; + if (line != null && column != null) { + location = new XContentLocation(line, column); + } + return new ParsingException(location, reason); + // But it isn't worth trying to get it perfect.... + default: + return new RuntimeException(type + ": " + reason); + } + } + + public void setType(String type) { + this.type = type; + } + public void setReason(String reason) { + this.reason = reason; + } + public void setLine(Integer line) { + this.line = line; + } + public void setColumn(Integer column) { + this.column = column; + } + public void setCausedBy(Throwable causedBy) { + this.causedBy = causedBy; + } + } + + /** + * Parses the {@code version} field of the main action. There are a surprising number of fields in this that we don't need! + */ + public static final ConstructingObjectParser VERSION_PARSER = new ConstructingObjectParser<>( + "version", a -> Version.fromString((String) a[0])); + static { + VERSION_PARSER.declareString(constructorArg(), new ParseField("number")); + VERSION_PARSER.declareBoolean((p, v) -> {}, new ParseField("snapshot_build")); + VERSION_PARSER.declareBoolean((p, v) -> {}, new ParseField("build_snapshot")); + VERSION_PARSER.declareString((p, v) -> {}, new ParseField("build_hash")); + VERSION_PARSER.declareString((p, v) -> {}, new ParseField("build_date")); + VERSION_PARSER.declareString((p, v) -> {}, new ParseField("build_timestamp")); + VERSION_PARSER.declareString((p, v) -> {}, new ParseField("lucene_version")); + } + + /** + * Parses the main action to return just the {@linkplain Version} that it returns. We throw everything else out. 
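+ * Hedged example (values invented): {"name":"node-1","cluster_name":"remote","version":{"number":"2.4.1"},"tagline":"You Know, for Search"} + * parses down to just Version.fromString("2.4.1").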
+ */ + public static final ConstructingObjectParser MAIN_ACTION_PARSER = new ConstructingObjectParser<>( + "/", a -> (Version) a[0]); + static { + MAIN_ACTION_PARSER.declareBoolean((p, v) -> {}, new ParseField("ok")); + MAIN_ACTION_PARSER.declareInt((p, v) -> {}, new ParseField("status")); + MAIN_ACTION_PARSER.declareString((p, v) -> {}, new ParseField("name")); + MAIN_ACTION_PARSER.declareString((p, v) -> {}, new ParseField("cluster_name")); + MAIN_ACTION_PARSER.declareString((p, v) -> {}, new ParseField("tagline")); + MAIN_ACTION_PARSER.declareObject(constructorArg(), VERSION_PARSER, new ParseField("version")); + } +} diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java new file mode 100644 index 00000000000..62dbd59f80a --- /dev/null +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSource.java @@ -0,0 +1,242 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.
+ */ + +package org.elasticsearch.index.reindex.remote; + +import org.apache.http.HttpEntity; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.bulk.BackoffPolicy; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.ParseFieldMatcherSupplier; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; +import org.elasticsearch.common.xcontent.XContent; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.reindex.ScrollableHitSource; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.threadpool.ThreadPool; + +import java.io.BufferedInputStream; +import java.io.Closeable; +import java.io.IOException; +import java.io.InputStream; +import java.util.Iterator; +import java.util.Map; +import java.util.function.BiFunction; +import java.util.function.Consumer; + +import static java.util.Collections.emptyMap; +import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; +import static org.elasticsearch.common.unit.TimeValue.timeValueNanos; +import static org.elasticsearch.index.reindex.remote.RemoteRequestBuilders.initialSearchEntity; +import static org.elasticsearch.index.reindex.remote.RemoteRequestBuilders.initialSearchParams; +import static org.elasticsearch.index.reindex.remote.RemoteRequestBuilders.initialSearchPath; +import static org.elasticsearch.index.reindex.remote.RemoteRequestBuilders.scrollEntity; +import static org.elasticsearch.index.reindex.remote.RemoteRequestBuilders.scrollParams; +import static org.elasticsearch.index.reindex.remote.RemoteRequestBuilders.scrollPath; +import static org.elasticsearch.index.reindex.remote.RemoteResponseParsers.MAIN_ACTION_PARSER; +import static org.elasticsearch.index.reindex.remote.RemoteResponseParsers.RESPONSE_PARSER; + +public class RemoteScrollableHitSource extends ScrollableHitSource { + private final AsyncClient client; + private final BytesReference query; + private final SearchRequest searchRequest; + Version remoteVersion; + + public RemoteScrollableHitSource(ESLogger logger, BackoffPolicy backoffPolicy, ThreadPool threadPool, Runnable countSearchRetry, + Consumer fail, AsyncClient client, BytesReference query, SearchRequest searchRequest) { + super(logger, backoffPolicy, threadPool, countSearchRetry, fail); + this.query = query; + this.searchRequest = searchRequest; + this.client = client; + } + + @Override + public void close() { + try { + client.close(); + } catch (IOException e) { + fail.accept(new IOException("couldn't close the remote connection", e)); + } + } + + @Override + protected void doStart(Consumer onResponse) { + lookupRemoteVersion(version -> { + remoteVersion = version; + execute("POST", initialSearchPath(searchRequest), initialSearchParams(searchRequest, version), + initialSearchEntity(query), RESPONSE_PARSER, r -> onStartResponse(onResponse, r)); + }); + } + + void lookupRemoteVersion(Consumer onVersion) { + execute("GET", "", emptyMap(), null, MAIN_ACTION_PARSER, onVersion); + + } + + void 
onStartResponse(Consumer onResponse, Response response) { + if (Strings.hasLength(response.getScrollId()) && response.getHits().isEmpty()) { + logger.debug("First response looks like a scan response. Jumping right to the second. scroll=[{}]", response.getScrollId()); + doStartNextScroll(response.getScrollId(), timeValueMillis(0), onResponse); + } else { + onResponse.accept(response); + } + } + + @Override + protected void doStartNextScroll(String scrollId, TimeValue extraKeepAlive, Consumer onResponse) { + execute("POST", scrollPath(), scrollParams(timeValueNanos(searchRequest.scroll().keepAlive().nanos() + extraKeepAlive.nanos())), + scrollEntity(scrollId), RESPONSE_PARSER, onResponse); + } + + @Override + protected void clearScroll(String scrollId) { + // Need to throw out response.... + client.performRequest("DELETE", scrollPath(), emptyMap(), scrollEntity(scrollId), new ResponseListener() { + @Override + public void onResponse(InputStream response) { + logger.debug("Successfully cleared [{}]", scrollId); + } + + @Override + public void onRetryableFailure(Exception t) { + onFailure(t); + } + + @Override + public void onFailure(Exception t) { + logger.warn("Failed to clear scroll [{}]", t, scrollId); + } + }); + } + + void execute(String method, String uri, Map params, HttpEntity entity, + BiFunction parser, Consumer listener) { + class RetryHelper extends AbstractRunnable { + private final Iterator retries = backoffPolicy.iterator(); + + @Override + protected void doRun() throws Exception { + client.performRequest(method, uri, params, entity, new ResponseListener() { + @Override + public void onResponse(InputStream content) { + T response; + try { + XContent xContent = XContentFactory.xContentType(content).xContent(); + try (XContentParser xContentParser = xContent.createParser(content)) { + response = parser.apply(xContentParser, () -> ParseFieldMatcher.STRICT); + } + } catch (IOException e) { + throw new ElasticsearchException("Error deserializing response", e); + } + listener.accept(response); + } + + @Override + public void onFailure(Exception e) { + fail.accept(e); + } + + @Override + public void onRetryableFailure(Exception t) { + if (retries.hasNext()) { + TimeValue delay = retries.next(); + logger.trace("retrying rejected search after [{}]", t, delay); + countSearchRetry.run(); + threadPool.schedule(delay, ThreadPool.Names.SAME, RetryHelper.this); + } else { + fail.accept(t); + } + } + }); + } + + @Override + public void onFailure(Exception t) { + fail.accept(t); + } + } + new RetryHelper().run(); + } + + public interface AsyncClient extends Closeable { + void performRequest(String method, String uri, Map params, HttpEntity entity, ResponseListener listener); + } + + public interface ResponseListener extends ActionListener { + void onRetryableFailure(Exception t); + } + + public static class AsynchronizingRestClient implements AsyncClient { + private final ThreadPool threadPool; + private final RestClient restClient; + + public AsynchronizingRestClient(ThreadPool threadPool, RestClient restClient) { + this.threadPool = threadPool; + this.restClient = restClient; + } + + @Override + public void performRequest(String method, String uri, Map params, HttpEntity entity, + ResponseListener listener) { + /* + * We use the generic thread pool here because this client is blocking and the generic thread pool is sized appropriately for some + * of the threads on it to be blocked, waiting on IO.
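+ * (The restClient.performRequest call below is synchronous and blocks the worker until the remote responds, which is why + * we hop onto the generic pool before making it.)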
It'd be a disaster if this ran on the listener thread pool, eating + * valuable threads needed to handle responses. Most other thread pool would probably not mind running this either, but the + * generic thread pool is the "most right" place for it to run. We could make our own thread pool for this but the generic + * thread pool already has plenty of capacity. + */ + threadPool.generic().execute(new AbstractRunnable() { + @Override + protected void doRun() throws Exception { + try (org.elasticsearch.client.Response response = restClient.performRequest(method, uri, params, entity)) { + InputStream markSupportedInputStream = new BufferedInputStream(response.getEntity().getContent()); + listener.onResponse(markSupportedInputStream); + } + } + + @Override + public void onFailure(Exception t) { + if (t instanceof ResponseException) { + ResponseException re = (ResponseException) t; + if (RestStatus.TOO_MANY_REQUESTS.getStatus() == re.getResponse().getStatusLine().getStatusCode()) { + listener.onRetryableFailure(t); + return; + } + } + listener.onFailure(t); + } + }); + } + + @Override + public void close() throws IOException { + restClient.close(); + } + } +} diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollActionScriptTestCase.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollActionScriptTestCase.java index 0318e4ddb01..f8351b262fc 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollActionScriptTestCase.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexByScrollActionScriptTestCase.java @@ -22,21 +22,15 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.common.text.Text; -import org.elasticsearch.index.Index; import org.elasticsearch.index.reindex.AbstractAsyncBulkIndexByScrollAction.OpType; import org.elasticsearch.index.reindex.AbstractAsyncBulkIndexByScrollAction.RequestWrapper; import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; -import org.elasticsearch.search.SearchHitField; -import org.elasticsearch.search.SearchShardTarget; -import org.elasticsearch.search.internal.InternalSearchHit; import org.junit.Before; import org.mockito.Matchers; -import java.util.HashMap; import java.util.Map; import java.util.function.Consumer; @@ -63,9 +57,7 @@ public abstract class AbstractAsyncBulkIndexByScrollActionScriptTestCase< @SuppressWarnings("unchecked") protected > T applyScript(Consumer> scriptBody) { IndexRequest index = new IndexRequest("index", "type", "1").source(singletonMap("foo", "bar")); - Map fields = new HashMap<>(); - InternalSearchHit doc = new InternalSearchHit(0, "id", new Text("type"), fields); - doc.shardTarget(new SearchShardTarget("nodeid", new Index("index", "uuid"), 1)); + ScrollableHitSource.Hit doc = new ScrollableHitSource.BasicHit("test", "type", "id", 0); ExecutableScript executableScript = new SimpleExecutableScript(scriptBody); when(scriptService.executable(any(CompiledScript.class), Matchers.>any())) diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexbyScrollActionMetadataTestCase.java 
b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexbyScrollActionMetadataTestCase.java index 5a9976fc005..4cc10334223 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexbyScrollActionMetadataTestCase.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexbyScrollActionMetadataTestCase.java @@ -20,16 +20,7 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.common.text.Text; -import org.elasticsearch.index.Index; -import org.elasticsearch.index.mapper.internal.TTLFieldMapper; -import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; -import org.elasticsearch.search.SearchShardTarget; -import org.elasticsearch.search.internal.InternalSearchHit; -import org.elasticsearch.search.internal.InternalSearchHitField; -import static java.util.Collections.singletonList; -import static java.util.Collections.singletonMap; import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; public abstract class AbstractAsyncBulkIndexbyScrollActionMetadataTestCase< @@ -37,25 +28,19 @@ public abstract class AbstractAsyncBulkIndexbyScrollActionMetadataTestCase< Response extends BulkIndexByScrollResponse> extends AbstractAsyncBulkIndexByScrollActionTestCase { - /** - * Create a doc with some metadata. - */ - protected InternalSearchHit doc(String field, Object value) { - InternalSearchHit doc = new InternalSearchHit(0, "id", new Text("type"), singletonMap(field, - new InternalSearchHitField(field, singletonList(value)))); - doc.shardTarget(new SearchShardTarget("node", new Index("index", "uuid"), 0)); - return doc; + protected ScrollableHitSource.BasicHit doc() { + return new ScrollableHitSource.BasicHit("index", "type", "id", 0); } public void testTimestampIsCopied() { IndexRequest index = new IndexRequest(); - action().copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc(TimestampFieldMapper.NAME, 10L)); + action().copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc().setTimestamp(10L)); assertEquals("10", index.timestamp()); } public void testTTL() throws Exception { IndexRequest index = new IndexRequest(); - action().copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc(TTLFieldMapper.NAME, 10L)); + action().copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc().setTTL(10L)); assertEquals(timeValueMillis(10), index.ttl()); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java index 9b094a9e2d3..77e792b8333 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java @@ -62,9 +62,10 @@ import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.index.Index; import org.elasticsearch.index.engine.VersionConflictEngineException; +import org.elasticsearch.index.reindex.ScrollableHitSource.Hit; +import org.elasticsearch.index.reindex.ScrollableHitSource.SearchFailure; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.search.SearchHit; import 
org.elasticsearch.search.internal.InternalSearchHit; import org.elasticsearch.search.internal.InternalSearchHits; import org.elasticsearch.search.internal.InternalSearchResponse; @@ -95,6 +96,7 @@ import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; import static java.util.Collections.singleton; +import static java.util.Collections.singletonList; import static org.apache.lucene.util.TestUtil.randomSimpleString; import static org.elasticsearch.action.bulk.BackoffPolicy.constantBackoff; import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; @@ -103,7 +105,7 @@ import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.either; -import static org.hamcrest.Matchers.emptyCollectionOf; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; @@ -155,7 +157,7 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { * random scroll id so it is checked instead. */ private String scrollId() { - scrollId = randomSimpleString(random(), 1, 1000); // Empty strings get special behavior we don't want + scrollId = randomSimpleString(random(), 1, 10); // Empty strings get special behavior we don't want return scrollId; } @@ -216,10 +218,8 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { assertEquals(0, testTask.getStatus().getTotal()); long total = randomIntBetween(0, Integer.MAX_VALUE); - InternalSearchHits hits = new InternalSearchHits(null, total, 0); - InternalSearchResponse searchResponse = new InternalSearchResponse(hits, null, null, null, false, false); - new DummyAbstractAsyncBulkByScrollAction().onScrollResponse(timeValueSeconds(0), 0, - new SearchResponse(searchResponse, scrollId(), 5, 4, randomLong(), null)); + ScrollableHitSource.Response response = new ScrollableHitSource.Response(false, emptyList(), total, emptyList(), null); + simulateScrollResponse(new DummyAbstractAsyncBulkByScrollAction(), timeValueSeconds(0), 0, response); assertEquals(total, testTask.getStatus().getTotal()); } @@ -229,12 +229,10 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { public void testScrollResponseBatchingBehavior() throws Exception { int maxBatches = randomIntBetween(0, 100); for (int batches = 1; batches < maxBatches; batches++) { - InternalSearchHit hit = new InternalSearchHit(0, "id", new Text("type"), emptyMap()); - InternalSearchHits hits = new InternalSearchHits(new InternalSearchHit[] { hit }, 0, 0); - InternalSearchResponse searchResponse = new InternalSearchResponse(hits, null, null, null, false, false); + Hit hit = new ScrollableHitSource.BasicHit("index", "type", "id", 0); + ScrollableHitSource.Response response = new ScrollableHitSource.Response(false, emptyList(), 1, singletonList(hit), null); DummyAbstractAsyncBulkByScrollAction action = new DummyAbstractAsyncBulkByScrollAction(); - action.onScrollResponse(timeValueNanos(System.nanoTime()), 0, - new SearchResponse(searchResponse, scrollId(), 5, 4, randomLong(), null)); + simulateScrollResponse(action, timeValueNanos(System.nanoTime()), 0, response); // Use assert busy because the update happens on another thread final int expectedBatches = batches; @@ -314,16 +312,10 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { return 
null; } }; - InternalSearchHits hits = new InternalSearchHits(null, 0, 0); - InternalSearchResponse searchResponse = new InternalSearchResponse(hits, null, null, null, false, false); - new DummyAbstractAsyncBulkByScrollAction().onScrollResponse(timeValueNanos(System.nanoTime()), 10, - new SearchResponse(searchResponse, scrollId(), 5, 4, randomLong(), null)); - try { - listener.get(); - fail("Expected a failure"); - } catch (ExecutionException e) { - assertThat(e.getMessage(), equalTo("EsRejectedExecutionException[test]")); - } + ScrollableHitSource.Response response = new ScrollableHitSource.Response(false, emptyList(), 0, emptyList(), null); + simulateScrollResponse(new DummyAbstractAsyncBulkByScrollAction(), timeValueNanos(System.nanoTime()), 10, response); + ExecutionException e = expectThrows(ExecutionException.class, () -> listener.get()); + assertThat(e.getMessage(), equalTo("EsRejectedExecutionException[test]")); assertThat(client.scrollsCleared, contains(scrollId)); // When the task is rejected we don't increment the throttled timer @@ -335,12 +327,12 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { * scroll request going down. */ public void testShardFailuresAbortRequest() throws Exception { - ShardSearchFailure shardFailure = new ShardSearchFailure(new RuntimeException("test")); - InternalSearchResponse internalResponse = new InternalSearchResponse(null, null, null, null, false, null); - new DummyAbstractAsyncBulkByScrollAction().onScrollResponse(timeValueNanos(System.nanoTime()), 0, - new SearchResponse(internalResponse, scrollId(), 5, 4, randomLong(), new ShardSearchFailure[] { shardFailure })); + SearchFailure shardFailure = new SearchFailure(new RuntimeException("test")); + ScrollableHitSource.Response scrollResponse = new ScrollableHitSource.Response(false, singletonList(shardFailure), 0, + emptyList(), null); + simulateScrollResponse(new DummyAbstractAsyncBulkByScrollAction(), timeValueNanos(System.nanoTime()), 0, scrollResponse); BulkIndexByScrollResponse response = listener.get(); - assertThat(response.getIndexingFailures(), emptyCollectionOf(Failure.class)); + assertThat(response.getBulkFailures(), empty()); assertThat(response.getSearchFailures(), contains(shardFailure)); assertFalse(response.isTimedOut()); assertNull(response.getReasonCancelled()); @@ -351,12 +343,11 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { * Mimics search timeouts.
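* A timed-out scroll response comes back with timedOut=true and no failures, so the test asserts isTimedOut() is set and the scroll still gets cleared.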
*/ public void testSearchTimeoutsAbortRequest() throws Exception { - InternalSearchResponse internalResponse = new InternalSearchResponse(null, null, null, null, true, null); - new DummyAbstractAsyncBulkByScrollAction().onScrollResponse(timeValueNanos(System.nanoTime()), 0, - new SearchResponse(internalResponse, scrollId(), 5, 4, randomLong(), new ShardSearchFailure[0])); + ScrollableHitSource.Response scrollResponse = new ScrollableHitSource.Response(true, emptyList(), 0, emptyList(), null); + simulateScrollResponse(new DummyAbstractAsyncBulkByScrollAction(), timeValueNanos(System.nanoTime()), 0, scrollResponse); BulkIndexByScrollResponse response = listener.get(); - assertThat(response.getIndexingFailures(), emptyCollectionOf(Failure.class)); - assertThat(response.getSearchFailures(), emptyCollectionOf(ShardSearchFailure.class)); + assertThat(response.getBulkFailures(), empty()); + assertThat(response.getSearchFailures(), empty()); assertTrue(response.isTimedOut()); assertNull(response.getReasonCancelled()); assertThat(client.scrollsCleared, contains(scrollId)); @@ -371,8 +362,8 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { BulkResponse bulkResponse = new BulkResponse(new BulkItemResponse[] {new BulkItemResponse(0, "index", failure)}, randomLong()); action.onBulkResponse(timeValueNanos(System.nanoTime()), bulkResponse); BulkIndexByScrollResponse response = listener.get(); - assertThat(response.getIndexingFailures(), contains(failure)); - assertThat(response.getSearchFailures(), emptyCollectionOf(ShardSearchFailure.class)); + assertThat(response.getBulkFailures(), contains(failure)); + assertThat(response.getSearchFailures(), empty()); assertNull(response.getReasonCancelled()); } @@ -382,15 +373,13 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { public void testListenerReceiveBuildBulkExceptions() throws Exception { DummyAbstractAsyncBulkByScrollAction action = new DummyAbstractAsyncBulkByScrollAction() { @Override - protected BulkRequest buildBulk(Iterable docs) { + protected BulkRequest buildBulk(Iterable docs) { throw new RuntimeException("surprise"); } }; - InternalSearchHit hit = new InternalSearchHit(0, "id", new Text("type"), emptyMap()); - InternalSearchHits hits = new InternalSearchHits(new InternalSearchHit[] {hit}, 0, 0); - InternalSearchResponse internalResponse = new InternalSearchResponse(hits, null, null, null, false, false); - SearchResponse searchResponse = new SearchResponse(internalResponse, scrollId(), 5, 4, randomLong(), null); - action.onScrollResponse(timeValueNanos(System.nanoTime()), 0, searchResponse); + Hit hit = new ScrollableHitSource.BasicHit("index", "type", "id", 0); + ScrollableHitSource.Response response = new ScrollableHitSource.Response(false, emptyList(), 1, singletonList(hit), null); + simulateScrollResponse(action, timeValueNanos(System.nanoTime()), 0, response); ExecutionException e = expectThrows(ExecutionException.class, () -> listener.get()); assertThat(e.getCause(), instanceOf(RuntimeException.class)); assertThat(e.getCause().getMessage(), equalTo("surprise")); @@ -499,9 +488,9 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { action.sendBulkRequest(timeValueNanos(System.nanoTime()), request); if (failWithRejection) { BulkIndexByScrollResponse response = listener.get(); - assertThat(response.getIndexingFailures(), hasSize(1)); - assertEquals(response.getIndexingFailures().get(0).getStatus(), RestStatus.TOO_MANY_REQUESTS); - assertThat(response.getSearchFailures(), 
emptyCollectionOf(ShardSearchFailure.class)); + assertThat(response.getBulkFailures(), hasSize(1)); + assertEquals(response.getBulkFailures().get(0).getStatus(), RestStatus.TOO_MANY_REQUESTS); + assertThat(response.getSearchFailures(), empty()); assertNull(response.getReasonCancelled()); } else { successLatch.await(10, TimeUnit.SECONDS); @@ -549,7 +538,7 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { if (addDestinationIndexes) { action.addDestinationIndices(singleton("foo")); } - action.startNormalTermination(emptyList(), emptyList(), false); + action.refreshAndFinish(emptyList(), emptyList(), false); if (shouldRefresh) { assertArrayEquals(new String[] {"foo"}, client.lastRefreshRequest.get().indices()); } else { @@ -563,7 +552,7 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { public void testCancelBeforeScrollResponse() throws Exception { // We bail so early we don't need to pass in a half way valid response. - cancelTaskCase((DummyAbstractAsyncBulkByScrollAction action) -> action.onScrollResponse(timeValueNanos(System.nanoTime()), 1, + cancelTaskCase((DummyAbstractAsyncBulkByScrollAction action) -> simulateScrollResponse(action, timeValueNanos(System.nanoTime()), 1, null)); } @@ -582,10 +571,10 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { cancelTaskCase((DummyAbstractAsyncBulkByScrollAction action) -> action.startNextScroll(timeValueNanos(System.nanoTime()), 0)); } - public void testCancelBeforeStartNormalTermination() throws Exception { + public void testCancelBeforeRefreshAndFinish() throws Exception { // Refresh or not doesn't matter - we don't try to refresh. testRequest.setRefresh(usually()); - cancelTaskCase((DummyAbstractAsyncBulkByScrollAction action) -> action.startNormalTermination(emptyList(), emptyList(), false)); + cancelTaskCase((DummyAbstractAsyncBulkByScrollAction action) -> action.refreshAndFinish(emptyList(), emptyList(), false)); assertNull("No refresh was attempted", client.lastRefreshRequest.get()); } @@ -625,12 +614,10 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { action.setScroll(scrollId()); } long total = randomIntBetween(0, Integer.MAX_VALUE); - InternalSearchHits hits = new InternalSearchHits(null, total, 0); - InternalSearchResponse searchResponse = new InternalSearchResponse(hits, null, null, null, false, false); + ScrollableHitSource.Response response = new ScrollableHitSource.Response(false, emptyList(), total, emptyList(), null); // Use a long delay here so the test will time out if the cancellation doesn't reschedule the throttled task - SearchResponse scrollResponse = new SearchResponse(searchResponse, scrollId(), 5, 4, randomLong(), null); testTask.rethrottle(1); - action.onScrollResponse(timeValueNanos(System.nanoTime()), 1000, scrollResponse); + simulateScrollResponse(action, timeValueNanos(System.nanoTime()), 1000, response); // Now that we've got our cancel we'll just verify that it all came through all right assertEquals(reason, listener.get(10, TimeUnit.SECONDS).getReasonCancelled()); @@ -656,23 +643,26 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { } } + /** + * Simulate a scroll response by setting the scroll id and firing the onScrollResponse method. 
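+ * e.g. simulateScrollResponse(action, timeValueNanos(System.nanoTime()), 0, response) stands in for a real scroll round trip.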
+ */ + private void simulateScrollResponse(DummyAbstractAsyncBulkByScrollAction action, TimeValue lastBatchTime, int lastBatchSize, + ScrollableHitSource.Response response) { + action.setScroll(scrollId()); + action.onScrollResponse(lastBatchTime, lastBatchSize, response); + } + private class DummyAbstractAsyncBulkByScrollAction extends AbstractAsyncBulkByScrollAction { public DummyAbstractAsyncBulkByScrollAction() { - super(testTask, logger, new ParentTaskAssigningClient(client, localNode, testTask), threadPool, testRequest, firstSearchRequest, - listener); + super(testTask, AsyncBulkByScrollActionTests.this.logger, new ParentTaskAssigningClient(client, localNode, testTask), + AsyncBulkByScrollActionTests.this.threadPool, testRequest, listener); } @Override - protected BulkRequest buildBulk(Iterable docs) { + protected BulkRequest buildBulk(Iterable docs) { return new BulkRequest(); } - - @Override - protected BulkIndexByScrollResponse buildResponse(TimeValue took, List indexingFailures, - List searchFailures, boolean timedOut) { - return new BulkIndexByScrollResponse(took, task.getStatus(), indexingFailures, searchFailures, timedOut); - } } /** @@ -805,7 +795,7 @@ public class AsyncBulkByScrollActionTests extends ESTestCase { super.doExecute(action, request, listener); } - private Throwable wrappedRejectedException() { + private Exception wrappedRejectedException() { Exception e = new EsRejectedExecutionException(); int wraps = randomIntBetween(0, 4); for (int i = 0; i < wraps; i++) { diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/BulkByScrollTaskTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/BulkByScrollTaskTests.java index 05699c6f7af..72c650805ce 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/BulkByScrollTaskTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/BulkByScrollTaskTests.java @@ -190,8 +190,8 @@ public class BulkByScrollTaskTests extends ESTestCase { } @Override - public void onFailure(Throwable t) { - errors.add(t); + public void onFailure(Exception e) { + errors.add(e); } }); @@ -271,7 +271,7 @@ public class BulkByScrollTaskTests extends ESTestCase { protected void doRun() throws Exception { } @Override - public void onFailure(Throwable t) { + public void onFailure(Exception e) { throw new UnsupportedOperationException(); } }); @@ -285,7 +285,7 @@ public class BulkByScrollTaskTests extends ESTestCase { public void testXContentRepresentationOfUnlimitedRequestsPerSecon() throws IOException { XContentBuilder builder = JsonXContent.contentBuilder(); task.getStatus().toXContent(builder, ToXContent.EMPTY_PARAMS); - assertThat(builder.string(), containsString("\"requests_per_second\":\"unlimited\"")); + assertThat(builder.string(), containsString("\"requests_per_second\":-1")); } public void testPerfectlyThrottledBatchTime() { diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseMatcher.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseMatcher.java index 4ef16c59141..c0c06b14d55 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseMatcher.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponseMatcher.java @@ -124,7 +124,7 @@ public class BulkIndexByScrollResponseMatcher extends TypeSafeMatcher whitelist = randomWhitelist(); + String[] inList = whitelist.iterator().next().split(":"); + String 
host = inList[0]; + int port = Integer.valueOf(inList[1]); + checkRemoteWhitelist(whitelist, new RemoteInfo(randomAsciiOfLength(5), host, port, new BytesArray("test"), null, null), + localhostOrNone()); + } + + public void testMyselfInWhitelistRemote() throws UnknownHostException { + Set whitelist = randomWhitelist(); + whitelist.add("myself"); + TransportAddress publishAddress = new InetSocketTransportAddress(InetAddress.getByAddress(new byte[] {0x7f,0x00,0x00,0x01}), 9200); + checkRemoteWhitelist(whitelist, new RemoteInfo(randomAsciiOfLength(5), "127.0.0.1", 9200, new BytesArray("test"), null, null), + publishAddress); + } + + public void testUnwhitelistedRemote() { + int port = between(1, Integer.MAX_VALUE); + Exception e = expectThrows(IllegalArgumentException.class, () -> checkRemoteWhitelist(randomWhitelist(), + new RemoteInfo(randomAsciiOfLength(5), "not in list", port, new BytesArray("test"), null, null), localhostOrNone())); + assertEquals("[not in list:" + port + "] not whitelisted in reindex.remote.whitelist", e.getMessage()); + } + + private Set randomWhitelist() { + int size = between(1, 100); + Set set = new HashSet<>(size); + while (set.size() < size) { + set.add(randomAsciiOfLength(5) + ':' + between(1, Integer.MAX_VALUE)); + } + return set; + } + + private TransportAddress localhostOrNone() { + return randomFrom(random(), null, localhost); + } +} diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexMetadataTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexMetadataTests.java index 670fcefbf55..dab0cab8d8a 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexMetadataTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexMetadataTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.index.mapper.internal.RoutingFieldMapper; /** * Index-by-search test for ttl, timestamp, and routing. 
@@ -29,7 +28,7 @@ import org.elasticsearch.index.mapper.internal.RoutingFieldMapper; public class ReindexMetadataTests extends AbstractAsyncBulkIndexbyScrollActionMetadataTestCase { public void testRoutingCopiedByDefault() throws Exception { IndexRequest index = new IndexRequest(); - action().copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc(RoutingFieldMapper.NAME, "foo")); + action().copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc().setRouting("foo")); assertEquals("foo", index.routing()); } @@ -37,7 +36,7 @@ public class ReindexMetadataTests extends AbstractAsyncBulkIndexbyScrollActionMe TransportReindexAction.AsyncIndexBySearchAction action = action(); action.mainRequest.getDestination().routing("keep"); IndexRequest index = new IndexRequest(); - action.copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc(RoutingFieldMapper.NAME, "foo")); + action.copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc().setRouting("foo")); assertEquals("foo", index.routing()); } @@ -45,7 +44,7 @@ public class ReindexMetadataTests extends AbstractAsyncBulkIndexbyScrollActionMe TransportReindexAction.AsyncIndexBySearchAction action = action(); action.mainRequest.getDestination().routing("discard"); IndexRequest index = new IndexRequest(); - action.copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc(RoutingFieldMapper.NAME, "foo")); + action.copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc().setRouting("foo")); assertEquals(null, index.routing()); } @@ -53,7 +52,7 @@ public class ReindexMetadataTests extends AbstractAsyncBulkIndexbyScrollActionMe TransportReindexAction.AsyncIndexBySearchAction action = action(); action.mainRequest.getDestination().routing("=cat"); IndexRequest index = new IndexRequest(); - action.copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc(RoutingFieldMapper.NAME, "foo")); + action.copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc().setRouting("foo")); assertEquals("cat", index.routing()); } @@ -61,7 +60,7 @@ public class ReindexMetadataTests extends AbstractAsyncBulkIndexbyScrollActionMe TransportReindexAction.AsyncIndexBySearchAction action = action(); action.mainRequest.getDestination().routing("==]"); IndexRequest index = new IndexRequest(); - action.copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc(RoutingFieldMapper.NAME, "foo")); + action.copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc().setRouting("foo")); assertEquals("=]", index.routing()); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java new file mode 100644 index 00000000000..efaf5e627ad --- /dev/null +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java @@ -0,0 +1,60 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.reindex; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.index.reindex.remote.RemoteInfo; +import org.elasticsearch.test.ESTestCase; + +import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; + +/** + * Tests some of the validation of {@linkplain ReindexRequest}. See reindex's rest tests for much more. + */ +public class ReindexRequestTests extends ESTestCase { + public void testTimestampAndTtlNotAllowed() { + ReindexRequest reindex = request(); + reindex.getDestination().ttl("1s").timestamp("now"); + ActionRequestValidationException e = reindex.validate(); + assertEquals("Validation Failed: 1: setting ttl on destination isn't supported. use scripts instead.;" + + "2: setting timestamp on destination isn't supported. use scripts instead.;", + e.getMessage()); + } + + public void testReindexFromRemoteDoesNotSupportSearchQuery() { + ReindexRequest reindex = request(); + reindex.setRemoteInfo(new RemoteInfo(randomAsciiOfLength(5), randomAsciiOfLength(5), between(1, Integer.MAX_VALUE), + new BytesArray("real_query"), null, null)); + reindex.getSearchRequest().source().query(matchAllQuery()); // Unsupported place to put query + ActionRequestValidationException e = reindex.validate(); + assertEquals("Validation Failed: 1: reindex from remote sources should use RemoteInfo's query instead of source's query;", + e.getMessage()); + } + + private ReindexRequest request() { + ReindexRequest reindex = new ReindexRequest(new SearchRequest(), new IndexRequest()); + reindex.getSearchRequest().indices("source"); + reindex.getDestination().index("dest"); + return reindex; + } +} diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java index 74b7548cd63..c70b80b8e37 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java @@ -22,7 +22,6 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.lucene.uid.Versions; -import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptService; import java.util.Map; @@ -106,7 +105,7 @@ public class ReindexScriptTests extends AbstractAsyncBulkIndexByScrollActionScri } public void testSetTimestamp() throws Exception { - String timestamp = randomFrom(null, "now", "1234"); + String timestamp = randomFrom("now", "1234", null); IndexRequest index = applyScript((Map ctx) -> ctx.put("_timestamp", timestamp)); assertEquals(timestamp, index.timestamp()); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexSameIndexTests.java 
b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexSourceTargetValidationTests.java similarity index 62% rename from modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexSameIndexTests.java rename to modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexSourceTargetValidationTests.java index f1218414af7..66896406c66 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexSameIndexTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexSourceTargetValidationTests.java @@ -30,15 +30,20 @@ import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.reindex.remote.RemoteInfo; import org.elasticsearch.test.ESTestCase; import static org.hamcrest.Matchers.containsString; /** - * Tests that indexing from an index back into itself fails the request. + * Tests source and target index validation of reindex. Mostly that means testing that indexing from an index back into itself fails the + * request. Note that we can't catch you trying to remotely reindex from yourself into yourself. We actually assert here that reindexes + * from remote don't need to come from existing indexes. It'd be silly to fail requests if the source index didn't exist on the target + * cluster.... */ -public class ReindexSameIndexTests extends ESTestCase { +public class ReindexSourceTargetValidationTests extends ESTestCase { private static final ClusterState STATE = ClusterState.builder(new ClusterName("test")).metaData(MetaData.builder() .put(index("target", "target_alias", "target_multi"), true) .put(index("target2", "target_multi"), true) @@ -50,7 +55,7 @@ public class ReindexSameIndexTests extends ESTestCase { private static final IndexNameExpressionResolver INDEX_NAME_EXPRESSION_RESOLVER = new IndexNameExpressionResolver(Settings.EMPTY); private static final AutoCreateIndex AUTO_CREATE_INDEX = new AutoCreateIndex(Settings.EMPTY, INDEX_NAME_EXPRESSION_RESOLVER); - public void testObviousCases() throws Exception { + public void testObviousCases() { fails("target", "target"); fails("target", "foo", "bar", "target", "baz"); fails("target", "foo", "bar", "target", "baz", "target"); @@ -58,7 +63,7 @@ public class ReindexSameIndexTests extends ESTestCase { succeeds("target", "source", "source2"); } - public void testAliasesContainTarget() throws Exception { + public void testAliasesContainTarget() { fails("target", "target_alias"); fails("target_alias", "target"); fails("target", "foo", "bar", "target_alias", "baz"); @@ -71,31 +76,33 @@ public class ReindexSameIndexTests extends ESTestCase { succeeds("target", "source", "source2", "source_multi"); } - public void testTargetIsAlias() throws Exception { - try { - succeeds("target_multi", "foo"); - fail("Expected failure"); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("Alias [target_multi] has more than one indices associated with it [[")); - // The index names can come in either order - assertThat(e.getMessage(), containsString("target")); - assertThat(e.getMessage(), containsString("target2")); - } + public void testTargetIsAlias() { + Exception e = expectThrows(IllegalArgumentException.class, () -> succeeds("target_multi", 
"foo")); + assertThat(e.getMessage(), containsString("Alias [target_multi] has more than one indices associated with it [[")); + // The index names can come in either order + assertThat(e.getMessage(), containsString("target")); + assertThat(e.getMessage(), containsString("target2")); } - private void fails(String target, String... sources) throws Exception { - try { - succeeds(target, sources); - fail("Expected an exception"); - } catch (ActionRequestValidationException e) { - assertThat(e.getMessage(), - containsString("reindex cannot write into an index its reading from [target]")); - } + public void testRemoteInfoSkipsValidation() { + // The index doesn't have to exist + succeeds(new RemoteInfo(randomAsciiOfLength(5), "test", 9200, new BytesArray("test"), null, null), "does_not_exist", "target"); + // And it doesn't matter if they are the same index. They are considered to be different because the remote one is, well, remote. + succeeds(new RemoteInfo(randomAsciiOfLength(5), "test", 9200, new BytesArray("test"), null, null), "target", "target"); } - private void succeeds(String target, String... sources) throws Exception { - TransportReindexAction.validateAgainstAliases(new SearchRequest(sources), new IndexRequest(target), INDEX_NAME_EXPRESSION_RESOLVER, - AUTO_CREATE_INDEX, STATE); + private void fails(String target, String... sources) { + Exception e = expectThrows(ActionRequestValidationException.class, () -> succeeds(target, sources)); + assertThat(e.getMessage(), containsString("reindex cannot write into an index its reading from [target]")); + } + + private void succeeds(String target, String... sources) { + succeeds(null, target, sources); + } + + private void succeeds(RemoteInfo remoteInfo, String target, String... sources) { + TransportReindexAction.validateAgainstAliases(new SearchRequest(sources), new IndexRequest(target), remoteInfo, + INDEX_NAME_EXPRESSION_RESOLVER, AUTO_CREATE_INDEX, STATE); } private static IndexMetaData index(String name, String... aliases) { diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java new file mode 100644 index 00000000000..1cbec59c49d --- /dev/null +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java @@ -0,0 +1,121 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.reindex; + +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.index.reindex.RestReindexAction.ReindexParseContext; +import org.elasticsearch.index.reindex.remote.RemoteInfo; +import org.elasticsearch.indices.query.IndicesQueriesRegistry; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +public class RestReindexActionTests extends ESTestCase { + public void testBuildRemoteInfoNoRemote() throws IOException { + assertNull(RestReindexAction.buildRemoteInfo(new HashMap<>())); + } + + public void testBuildRemoteInfoFullyLoaded() throws IOException { + Map remote = new HashMap<>(); + remote.put("host", "https://example.com:9200"); + remote.put("username", "testuser"); + remote.put("password", "testpass"); + + Map query = new HashMap<>(); + query.put("a", "b"); + + Map source = new HashMap<>(); + source.put("remote", remote); + source.put("query", query); + + RemoteInfo remoteInfo = RestReindexAction.buildRemoteInfo(source); + assertEquals("https", remoteInfo.getScheme()); + assertEquals("example.com", remoteInfo.getHost()); + assertEquals(9200, remoteInfo.getPort()); + assertEquals("{\n \"a\" : \"b\"\n}", remoteInfo.getQuery().utf8ToString()); + assertEquals("testuser", remoteInfo.getUsername()); + assertEquals("testpass", remoteInfo.getPassword()); + } + + public void testBuildRemoteInfoWithoutAllParts() throws IOException { + expectThrows(IllegalArgumentException.class, () -> buildRemoteInfoHostTestCase("example.com")); + expectThrows(IllegalArgumentException.class, () -> buildRemoteInfoHostTestCase("example.com:9200")); + expectThrows(IllegalArgumentException.class, () -> buildRemoteInfoHostTestCase("http://example.com")); + } + + public void testBuildRemoteInfoWithAllParts() throws IOException { + RemoteInfo info = buildRemoteInfoHostTestCase("http://example.com:9200"); + assertEquals("http", info.getScheme()); + assertEquals("example.com", info.getHost()); + assertEquals(9200, info.getPort()); + + info = buildRemoteInfoHostTestCase("https://other.example.com:9201"); + assertEquals("https", info.getScheme()); + assertEquals("other.example.com", info.getHost()); + assertEquals(9201, info.getPort()); + } + + public void testReindexFromRemoteRequestParsing() throws IOException { + BytesReference request; + try (XContentBuilder b = JsonXContent.contentBuilder()) { + b.startObject(); { + b.startObject("source"); { + b.startObject("remote"); { + b.field("host", "http://localhost:9200"); + } + b.endObject(); + b.field("index", "source"); + } + b.endObject(); + b.startObject("dest"); { + b.field("index", "dest"); + } + b.endObject(); + } + b.endObject(); + request = b.bytes(); + } + try (XContentParser p = JsonXContent.jsonXContent.createParser(request)) { + ReindexRequest r = new ReindexRequest(new SearchRequest(), new IndexRequest()); + RestReindexAction.PARSER.parse(p, r, + new ReindexParseContext(new IndicesQueriesRegistry(), null, null, ParseFieldMatcher.STRICT)); + assertEquals("localhost", r.getRemoteInfo().getHost()); + assertArrayEquals(new String[] {"source"}, r.getSearchRequest().indices()); + } + } + + private 
RemoteInfo buildRemoteInfoHostTestCase(String hostInRest) throws IOException { + Map<String, Object> remote = new HashMap<>(); + remote.put("host", hostInRest); + + Map<String, Object> source = new HashMap<>(); + source.put("remote", remote); + + return RestReindexAction.buildRemoteInfo(source); + } +} diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java index 09945c9372b..fd251d96a1f 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java @@ -20,25 +20,34 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.action.ListenableActionFuture; +import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.bulk.Retry; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.index.reindex.remote.RemoteInfo; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.Netty3Plugin; import org.junit.After; import org.junit.Before; import java.util.ArrayList; import java.util.Collection; +import java.util.Collections; import java.util.List; import java.util.concurrent.CyclicBarrier; import static org.elasticsearch.index.reindex.ReindexTestCase.matcher; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; @@ -53,7 +62,19 @@ public class RetryTests extends ESSingleNodeTestCase { @Override protected Collection<Class<? extends Plugin>> getPlugins() { - return pluginList(ReindexPlugin.class); + return pluginList(ReindexPlugin.class, Netty3Plugin.class, BogusPlugin.class); // we need netty here for http communication + } + + public static final class BogusPlugin extends Plugin { + // see Netty3Plugin:
these tests run without the permissions granted to the netty3 module, so they would fail because reindex can't set the property. + // To make them work anyway we disable that check, but we have to register the setting first + private static final Setting<Boolean> ASSERT_NETTY_BUGLEVEL = Setting.boolSetting("netty.assert.buglevel", true, + Setting.Property.NodeScope); + + @Override + public List<Setting<?>> getSettings() { + return Collections.singletonList(ASSERT_NETTY_BUGLEVEL); + } } /** @@ -63,11 +84,16 @@ public class RetryTests extends ESSingleNodeTestCase { protected Settings nodeSettings() { Settings.Builder settings = Settings.builder().put(super.nodeSettings()); // Use pools of size 1 so we can block them + settings.put("netty.assert.buglevel", false); settings.put("thread_pool.bulk.size", 1); settings.put("thread_pool.search.size", 1); // Use queues of size 1 because size 0 is broken and because search requests need the queue to function settings.put("thread_pool.bulk.queue_size", 1); settings.put("thread_pool.search.queue_size", 1); + // Enable http so we can test retries on reindex from remote. In this case the "remote" cluster is just this cluster. + settings.put(NetworkModule.HTTP_ENABLED.getKey(), true); + // Whitelist reindexing from the http host we're going to use + settings.put(TransportReindexAction.REMOTE_CLUSTER_WHITELIST.getKey(), "myself"); return settings.build(); } @@ -97,6 +123,15 @@ public class RetryTests extends ESSingleNodeTestCase { matcher().created(DOC_COUNT)); } + public void testReindexFromRemote() throws Exception { + NodeInfo nodeInfo = client().admin().cluster().prepareNodesInfo().get().getNodes().get(0); + TransportAddress address = nodeInfo.getHttp().getAddress().publishAddress(); + RemoteInfo remote = new RemoteInfo("http", address.getHost(), address.getPort(), new BytesArray("{\"match_all\":{}}"), null, null); + ReindexRequestBuilder request = ReindexAction.INSTANCE.newRequestBuilder(client()).source("source").destination("dest") + .setRemoteInfo(remote); + testCase(ReindexAction.NAME, request, matcher().created(DOC_COUNT)); + } + public void testUpdateByQuery() throws Exception { testCase(UpdateByQueryAction.NAME, UpdateByQueryAction.INSTANCE.newRequestBuilder(client()).source("source"), matcher().updated(DOC_COUNT)); @@ -118,34 +153,41 @@ logger.info("Starting request"); ListenableActionFuture<BulkIndexByScrollResponse> responseListener = request.execute(); - logger.info("Waiting for search rejections on the initial search"); - assertBusy(() -> assertThat(taskStatus(action).getSearchRetries(), greaterThan(0L))); + try { + logger.info("Waiting for search rejections on the initial search"); + assertBusy(() -> assertThat(taskStatus(action).getSearchRetries(), greaterThan(0L))); - logger.info("Blocking bulk and unblocking search so we start to get bulk rejections"); - CyclicBarrier bulkBlock = blockExecutor(ThreadPool.Names.BULK); - initialSearchBlock.await(); + logger.info("Blocking bulk and unblocking search so we start to get bulk rejections"); + CyclicBarrier bulkBlock = blockExecutor(ThreadPool.Names.BULK); + initialSearchBlock.await(); - logger.info("Waiting for bulk rejections"); - assertBusy(() -> assertThat(taskStatus(action).getBulkRetries(), greaterThan(0L))); + logger.info("Waiting for bulk rejections"); + assertBusy(() -> assertThat(taskStatus(action).getBulkRetries(), greaterThan(0L))); - // Keep a copy of the current number of search rejections so we can assert that we get more when we block the scroll - long initialSearchRejections =
taskStatus(action).getSearchRetries(); + // Keep a copy of the current number of search rejections so we can assert that we get more when we block the scroll + long initialSearchRejections = taskStatus(action).getSearchRetries(); - logger.info("Blocking search and unblocking bulk so we should get search rejections for the scroll"); - CyclicBarrier scrollBlock = blockExecutor(ThreadPool.Names.SEARCH); - bulkBlock.await(); + logger.info("Blocking search and unblocking bulk so we should get search rejections for the scroll"); + CyclicBarrier scrollBlock = blockExecutor(ThreadPool.Names.SEARCH); + bulkBlock.await(); - logger.info("Waiting for search rejections for the scroll"); - assertBusy(() -> assertThat(taskStatus(action).getSearchRetries(), greaterThan(initialSearchRejections))); + logger.info("Waiting for search rejections for the scroll"); + assertBusy(() -> assertThat(taskStatus(action).getSearchRetries(), greaterThan(initialSearchRejections))); - logger.info("Unblocking the scroll"); - scrollBlock.await(); + logger.info("Unblocking the scroll"); + scrollBlock.await(); - logger.info("Waiting for the request to finish"); - BulkIndexByScrollResponse response = responseListener.get(); - assertThat(response, matcher); - assertThat(response.getBulkRetries(), greaterThan(0L)); - assertThat(response.getSearchRetries(), greaterThan(initialSearchRejections)); + logger.info("Waiting for the request to finish"); + BulkIndexByScrollResponse response = responseListener.get(); + assertThat(response, matcher); + assertThat(response.getBulkRetries(), greaterThan(0L)); + assertThat(response.getSearchRetries(), greaterThan(initialSearchRejections)); + } finally { + // Fetch the response just in case we blew up half way through. This will make sure the failure is thrown up to the top level. 
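+ // (responseListener.get() blocks until the request finishes and rethrows anything that failed, so an error from the blocked executors above cannot be silently swallowed.)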
+ BulkIndexByScrollResponse response = responseListener.get(); + assertThat(response.getSearchFailures(), empty()); + assertThat(response.getBulkFailures(), empty()); + } } /** @@ -162,7 +204,7 @@ public class RetryTests extends ESSingleNodeTestCase { barrier.await(); logger.info("Blocked the [{}] executor", name); barrier.await(); - logger.info("Ublocking the [{}] executor", name); + logger.info("Unblocking the [{}] executor", name); } catch (Exception e) { throw new RuntimeException(e); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java index d1cb77361bb..3e3b3a63d62 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java @@ -19,20 +19,21 @@ package org.elasticsearch.index.reindex; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.WriteConsistencyLevel; import org.elasticsearch.action.bulk.BulkItemResponse.Failure; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.index.Index; -import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.index.reindex.ScrollableHitSource.SearchFailure; +import org.elasticsearch.index.reindex.remote.RemoteInfo; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService.ScriptType; -import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ESTestCase; @@ -56,11 +57,28 @@ public class RoundTripTests extends ESTestCase { randomRequest(reindex); reindex.getDestination().version(randomFrom(Versions.MATCH_ANY, Versions.MATCH_DELETED, 12L, 1L, 123124L, 12L)); reindex.getDestination().index("test"); + if (randomBoolean()) { + int port = between(1, Integer.MAX_VALUE); + BytesReference query = new BytesArray(randomAsciiOfLength(5)); + String username = randomBoolean() ? randomAsciiOfLength(5) : null; + String password = username != null && randomBoolean() ? 
randomAsciiOfLength(5) : null; + reindex.setRemoteInfo(new RemoteInfo(randomAsciiOfLength(5), randomAsciiOfLength(5), port, query, username, password)); + } ReindexRequest tripped = new ReindexRequest(); roundTrip(reindex, tripped); assertRequestEquals(reindex, tripped); assertEquals(reindex.getDestination().version(), tripped.getDestination().version()); assertEquals(reindex.getDestination().index(), tripped.getDestination().index()); + if (reindex.getRemoteInfo() == null) { + assertNull(tripped.getRemoteInfo()); + } else { + assertNotNull(tripped.getRemoteInfo()); + assertEquals(reindex.getRemoteInfo().getScheme(), tripped.getRemoteInfo().getScheme()); + assertEquals(reindex.getRemoteInfo().getHost(), tripped.getRemoteInfo().getHost()); + assertEquals(reindex.getRemoteInfo().getQuery(), tripped.getRemoteInfo().getQuery()); + assertEquals(reindex.getRemoteInfo().getUsername(), tripped.getRemoteInfo().getUsername()); + assertEquals(reindex.getRemoteInfo().getPassword(), tripped.getRemoteInfo().getPassword()); + } } public void testUpdateByQueryRequest() throws IOException { @@ -149,13 +167,19 @@ randomSimpleString(random()), new IllegalArgumentException("test"))); } - private List<ShardSearchFailure> randomSearchFailures() { - if (usually()) { + private List<SearchFailure> randomSearchFailures() { + if (randomBoolean()) { return emptyList(); } - Index index = new Index(randomSimpleString(random()), "uuid"); - return singletonList(new ShardSearchFailure(randomSimpleString(random()), - new SearchShardTarget(randomSimpleString(random()), index, randomInt()), randomFrom(RestStatus.values()))); + String index = null; + Integer shardId = null; + String nodeId = null; + if (randomBoolean()) { + index = randomAsciiOfLength(5); + shardId = randomInt(); + nodeId = usually() ?
randomAsciiOfLength(5) : null; + } + return singletonList(new SearchFailure(new ElasticsearchException("foo"), index, shardId, nodeId)); } private void roundTrip(Streamable example, Streamable empty) throws IOException { @@ -182,10 +206,10 @@ public class RoundTripTests extends ESTestCase { private void assertResponseEquals(BulkIndexByScrollResponse expected, BulkIndexByScrollResponse actual) { assertEquals(expected.getTook(), actual.getTook()); assertTaskStatusEquals(expected.getStatus(), actual.getStatus()); - assertEquals(expected.getIndexingFailures().size(), actual.getIndexingFailures().size()); - for (int i = 0; i < expected.getIndexingFailures().size(); i++) { - Failure expectedFailure = expected.getIndexingFailures().get(i); - Failure actualFailure = actual.getIndexingFailures().get(i); + assertEquals(expected.getBulkFailures().size(), actual.getBulkFailures().size()); + for (int i = 0; i < expected.getBulkFailures().size(); i++) { + Failure expectedFailure = expected.getBulkFailures().get(i); + Failure actualFailure = actual.getBulkFailures().get(i); assertEquals(expectedFailure.getIndex(), actualFailure.getIndex()); assertEquals(expectedFailure.getType(), actualFailure.getType()); assertEquals(expectedFailure.getId(), actualFailure.getId()); @@ -194,13 +218,15 @@ public class RoundTripTests extends ESTestCase { } assertEquals(expected.getSearchFailures().size(), actual.getSearchFailures().size()); for (int i = 0; i < expected.getSearchFailures().size(); i++) { - ShardSearchFailure expectedFailure = expected.getSearchFailures().get(i); - ShardSearchFailure actualFailure = actual.getSearchFailures().get(i); - assertEquals(expectedFailure.shard(), actualFailure.shard()); - assertEquals(expectedFailure.status(), actualFailure.status()); - // We can't use getCause because throwable doesn't implement equals - assertEquals(expectedFailure.reason(), actualFailure.reason()); + SearchFailure expectedFailure = expected.getSearchFailures().get(i); + SearchFailure actualFailure = actual.getSearchFailures().get(i); + assertEquals(expectedFailure.getIndex(), actualFailure.getIndex()); + assertEquals(expectedFailure.getShardId(), actualFailure.getShardId()); + assertEquals(expectedFailure.getNodeId(), actualFailure.getNodeId()); + assertEquals(expectedFailure.getReason().getClass(), actualFailure.getReason().getClass()); + assertEquals(expectedFailure.getReason().getMessage(), actualFailure.getReason().getMessage()); } + } private void assertTaskStatusEquals(BulkByScrollTask.Status expected, BulkByScrollTask.Status actual) { diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryMetadataTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryMetadataTests.java index bb6a33b593a..6ebb0749792 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryMetadataTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryMetadataTests.java @@ -21,13 +21,12 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.index.mapper.internal.RoutingFieldMapper; public class UpdateByQueryMetadataTests extends AbstractAsyncBulkIndexbyScrollActionMetadataTestCase { public void testRoutingIsCopied() throws Exception { IndexRequest index = new IndexRequest(); - action().copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc(RoutingFieldMapper.NAME, "foo")); + 
action().copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc().setRouting("foo")); assertEquals("foo", index.routing()); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWhileModifyingTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWhileModifyingTests.java index faea69b870f..6bbcbd6e643 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWhileModifyingTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWhileModifyingTests.java @@ -44,7 +44,7 @@ public class UpdateByQueryWhileModifyingTests extends ReindexTestCase { AtomicReference<String> value = new AtomicReference<>(randomSimpleString(random())); indexRandom(true, client().prepareIndex("test", "test", "test").setSource("test", value.get())); - AtomicReference<Throwable> failure = new AtomicReference<>(); + AtomicReference<Exception> failure = new AtomicReference<>(); AtomicBoolean keepUpdating = new AtomicBoolean(true); Thread updater = new Thread(() -> { while (keepUpdating.get()) { @@ -52,8 +52,8 @@ BulkIndexByScrollResponse response = updateByQuery().source("test").refresh(true).abortOnVersionConflict(false).get(); assertThat(response, matcher().updated(either(equalTo(0L)).or(equalTo(1L))) .versionConflicts(either(equalTo(0L)).or(equalTo(1L)))); - } catch (Throwable t) { - failure.set(t); + } catch (Exception e) { + failure.set(e); } } }); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWithScriptTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWithScriptTests.java index 1c57c202766..c5b9d4da64f 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWithScriptTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWithScriptTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.reindex; +import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.script.ScriptService; import java.util.Date; @@ -48,7 +49,7 @@ public class UpdateByQueryWithScriptTests @Override protected UpdateByQueryRequest request() { - return new UpdateByQueryRequest(); + return new UpdateByQueryRequest(new SearchRequest()); } @Override diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteInfoTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteInfoTests.java new file mode 100644 index 00000000000..5492a05986c --- /dev/null +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteInfoTests.java @@ -0,0 +1,36 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.
+ */ + +package org.elasticsearch.index.reindex.remote; + +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.test.ESTestCase; + +public class RemoteInfoTests extends ESTestCase { + public void testToString() { + RemoteInfo info = new RemoteInfo("http", "testhost", 12344, new BytesArray("testquery"), null, null); + assertEquals("host=testhost port=12344 query=testquery", info.toString()); + info = new RemoteInfo("http", "testhost", 12344, new BytesArray("testquery"), "testuser", null); + assertEquals("host=testhost port=12344 query=testquery username=testuser", info.toString()); + info = new RemoteInfo("http", "testhost", 12344, new BytesArray("testquery"), "testuser", "testpass"); + assertEquals("host=testhost port=12344 query=testquery username=testuser password=<<>>", info.toString()); + info = new RemoteInfo("https", "testhost", 12344, new BytesArray("testquery"), "testuser", "testpass"); + assertEquals("scheme=https host=testhost port=12344 query=testquery username=testuser password=<<>>", info.toString()); + } +} diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuildersTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuildersTests.java new file mode 100644 index 00000000000..9bbfd175a79 --- /dev/null +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuildersTests.java @@ -0,0 +1,181 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.reindex.remote; + +import org.apache.http.HttpEntity; +import org.apache.http.entity.ContentType; +import org.elasticsearch.Version; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.io.Streams; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; +import java.util.Map; + +import static org.elasticsearch.index.reindex.remote.RemoteRequestBuilders.initialSearchEntity; +import static org.elasticsearch.index.reindex.remote.RemoteRequestBuilders.initialSearchParams; +import static org.elasticsearch.index.reindex.remote.RemoteRequestBuilders.initialSearchPath; +import static org.elasticsearch.index.reindex.remote.RemoteRequestBuilders.scrollEntity; +import static org.elasticsearch.index.reindex.remote.RemoteRequestBuilders.scrollParams; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.either; +import static org.hamcrest.Matchers.hasEntry; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.not; + +public class RemoteRequestBuildersTests extends ESTestCase { + public void testInitialSearchPath() { + SearchRequest searchRequest = new SearchRequest().source(new SearchSourceBuilder()); + + assertEquals("/_search", initialSearchPath(searchRequest)); + searchRequest.indices("a"); + searchRequest.types("b"); + assertEquals("/a/b/_search", initialSearchPath(searchRequest)); + searchRequest.indices("a", "b"); + searchRequest.types("c", "d"); + assertEquals("/a,b/c,d/_search", initialSearchPath(searchRequest)); + + searchRequest.indices("cat,"); + expectBadStartRequest(searchRequest, "Index", ",", "cat,"); + searchRequest.indices("cat,", "dog"); + expectBadStartRequest(searchRequest, "Index", ",", "cat,"); + searchRequest.indices("dog", "cat,"); + expectBadStartRequest(searchRequest, "Index", ",", "cat,"); + searchRequest.indices("cat/"); + expectBadStartRequest(searchRequest, "Index", "/", "cat/"); + searchRequest.indices("cat/", "dog"); + expectBadStartRequest(searchRequest, "Index", "/", "cat/"); + searchRequest.indices("dog", "cat/"); + expectBadStartRequest(searchRequest, "Index", "/", "cat/"); + + searchRequest.indices("ok"); + searchRequest.types("cat,"); + expectBadStartRequest(searchRequest, "Type", ",", "cat,"); + searchRequest.types("cat,", "dog"); + expectBadStartRequest(searchRequest, "Type", ",", "cat,"); + searchRequest.types("dog", "cat,"); + expectBadStartRequest(searchRequest, "Type", ",", "cat,"); + searchRequest.types("cat/"); + expectBadStartRequest(searchRequest, "Type", "/", "cat/"); + searchRequest.types("cat/", "dog"); + expectBadStartRequest(searchRequest, "Type", "/", "cat/"); + searchRequest.types("dog", "cat/"); + expectBadStartRequest(searchRequest, "Type", "/", "cat/"); + } + + private void expectBadStartRequest(SearchRequest searchRequest, String type, String bad, String failed) { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> initialSearchPath(searchRequest)); + assertEquals(type + " containing [" + bad + "] not supported but got [" + failed + "]", e.getMessage()); + } + + public void testInitialSearchParamsSort() { + SearchRequest searchRequest = new SearchRequest().source(new SearchSourceBuilder()); + + // Test sort:_doc for versions
that support it. + Version remoteVersion = Version.fromId(between(Version.V_2_1_0_ID, Version.CURRENT.id)); + searchRequest.source().sort("_doc"); + assertThat(initialSearchParams(searchRequest, remoteVersion), hasEntry("sorts", "_doc:asc")); + + // Test search_type scan for versions that don't support sort:_doc. + remoteVersion = Version.fromId(between(0, Version.V_2_1_0_ID - 1)); + assertThat(initialSearchParams(searchRequest, remoteVersion), hasEntry("search_type", "scan")); + + // Test sorting by some field. Version doesn't matter. + remoteVersion = Version.fromId(between(0, Version.CURRENT.id)); + searchRequest.source().sorts().clear(); + searchRequest.source().sort("foo"); + assertThat(initialSearchParams(searchRequest, remoteVersion), hasEntry("sorts", "foo:asc")); + } + + public void testInitialSearchParamsFields() { + SearchRequest searchRequest = new SearchRequest().source(new SearchSourceBuilder()); + + // Test request without any fields + Version remoteVersion = Version.fromId(between(0, Version.CURRENT.id)); + assertThat(initialSearchParams(searchRequest, remoteVersion), + not(either(hasKey("stored_fields")).or(hasKey("fields")))); + + // Setup some fields for the next two tests + searchRequest.source().storedField("_source").storedField("_id"); + + // Test stored_fields for versions that support it + remoteVersion = Version.fromId(between(Version.V_5_0_0_alpha4_ID, Version.CURRENT.id)); + assertThat(initialSearchParams(searchRequest, remoteVersion), hasEntry("stored_fields", "_source,_id")); + + // Test fields for versions that support it + remoteVersion = Version.fromId(between(0, Version.V_5_0_0_alpha4_ID - 1)); + assertThat(initialSearchParams(searchRequest, remoteVersion), hasEntry("fields", "_source,_id")); + } + + public void testInitialSearchParamsMisc() { + SearchRequest searchRequest = new SearchRequest().source(new SearchSourceBuilder()); + Version remoteVersion = Version.fromId(between(0, Version.CURRENT.id)); + + TimeValue scroll = null; + if (randomBoolean()) { + scroll = TimeValue.parseTimeValue(randomPositiveTimeValue(), "test"); + searchRequest.scroll(scroll); + } + int size = between(0, Integer.MAX_VALUE); + searchRequest.source().size(size); + Boolean fetchVersion = null; + if (randomBoolean()) { + fetchVersion = randomBoolean(); + searchRequest.source().version(fetchVersion); + } + + Map params = initialSearchParams(searchRequest, remoteVersion); + + assertThat(params, scroll == null ? not(hasKey("scroll")) : hasEntry("scroll", scroll.toString())); + assertThat(params, hasEntry("size", Integer.toString(size))); + assertThat(params, fetchVersion == null || fetchVersion == true ? 
hasEntry("version", null) : not(hasEntry("version", null))); + } + + public void testInitialSearchEntity() throws IOException { + String query = "{\"match_all\":{}}"; + HttpEntity entity = initialSearchEntity(new BytesArray(query)); + assertEquals(ContentType.APPLICATION_JSON.toString(), entity.getContentType().getValue()); + assertEquals("{\"query\":" + query + "}", + Streams.copyToString(new InputStreamReader(entity.getContent(), StandardCharsets.UTF_8))); + + // Invalid XContent fails + RuntimeException e = expectThrows(RuntimeException.class, () -> initialSearchEntity(new BytesArray("{}, \"trailing\": {}"))); + assertThat(e.getCause().getMessage(), containsString("Unexpected character (',' (code 44))")); + e = expectThrows(RuntimeException.class, () -> initialSearchEntity(new BytesArray("{"))); + assertThat(e.getCause().getMessage(), containsString("Unexpected end-of-input")); + } + + public void testScrollParams() { + TimeValue scroll = TimeValue.parseTimeValue(randomPositiveTimeValue(), "test"); + assertThat(scrollParams(scroll), hasEntry("scroll", scroll.toString())); + } + + public void testScrollEntity() throws IOException { + String scroll = randomAsciiOfLength(30); + HttpEntity entity = scrollEntity(scroll); + assertEquals(ContentType.TEXT_PLAIN.toString(), entity.getContentType().getValue()); + assertEquals(scroll, Streams.copyToString(new InputStreamReader(entity.getContent(), StandardCharsets.UTF_8))); + } +} diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java new file mode 100644 index 00000000000..f8f3e82b4bb --- /dev/null +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java @@ -0,0 +1,381 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.reindex.remote; + +import org.apache.http.HttpEntity; +import org.elasticsearch.Version; +import org.elasticsearch.action.bulk.BackoffPolicy; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.io.Streams; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.index.reindex.ScrollableHitSource.Response; +import org.elasticsearch.index.reindex.remote.RemoteScrollableHitSource.ResponseListener; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.net.URL; +import java.nio.charset.StandardCharsets; +import java.util.Map; +import java.util.concurrent.Executor; +import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Consumer; + +import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; +import static org.elasticsearch.common.unit.TimeValue.timeValueMinutes; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; + +public class RemoteScrollableHitSourceTests extends ESTestCase { + private final String FAKE_SCROLL_ID = "DnF1ZXJ5VGhlbkZldGNoBQAAAfakescroll"; + private int retries; + private ThreadPool threadPool; + private SearchRequest searchRequest; + private int retriesAllowed; + + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + threadPool = new TestThreadPool(getTestName()) { + @Override + public Executor executor(String name) { + return r -> r.run(); + } + + @Override + public ScheduledFuture schedule(TimeValue delay, String name, Runnable command) { + command.run(); + return null; + } + }; + retries = 0; + searchRequest = new SearchRequest(); + searchRequest.scroll(timeValueMinutes(5)); + searchRequest.source(new SearchSourceBuilder().size(10).version(true).sort("_doc").size(123)); + retriesAllowed = 0; + } + + @After + @Override + public void tearDown() throws Exception { + super.tearDown(); + terminate(threadPool); + } + + public void testLookupRemoteVersion() throws Exception { + sourceWithMockedRemoteCall(false, "main/0_20_5.json").lookupRemoteVersion(v -> assertEquals(Version.fromString("0.20.5"), v)); + sourceWithMockedRemoteCall(false, "main/0_90_13.json").lookupRemoteVersion(v -> assertEquals(Version.fromString("0.90.13"), v)); + sourceWithMockedRemoteCall(false, "main/1_7_5.json").lookupRemoteVersion(v -> assertEquals(Version.fromString("1.7.5"), v)); + sourceWithMockedRemoteCall(false, "main/2_3_3.json").lookupRemoteVersion(v -> assertEquals(Version.V_2_3_3, v)); + sourceWithMockedRemoteCall(false, "main/5_0_0_alpha_3.json").lookupRemoteVersion(v -> assertEquals(Version.V_5_0_0_alpha3, v)); + } + + public void testParseStartOk() throws Exception { + AtomicBoolean called = new AtomicBoolean(); + sourceWithMockedRemoteCall("start_ok.json").doStart(r -> { + assertFalse(r.isTimedOut()); + assertEquals(FAKE_SCROLL_ID, r.getScrollId()); + assertEquals(4, r.getTotalHits()); + assertThat(r.getFailures(), empty()); + 
assertThat(r.getHits(), hasSize(1)); + assertEquals("test", r.getHits().get(0).getIndex()); + assertEquals("test", r.getHits().get(0).getType()); + assertEquals("AVToMiC250DjIiBO3yJ_", r.getHits().get(0).getId()); + assertEquals("{\"test\":\"test2\"}", r.getHits().get(0).getSource().utf8ToString()); + assertNull(r.getHits().get(0).getTTL()); + assertNull(r.getHits().get(0).getTimestamp()); + assertNull(r.getHits().get(0).getRouting()); + called.set(true); + }); + assertTrue(called.get()); + } + + public void testParseScrollOk() throws Exception { + AtomicBoolean called = new AtomicBoolean(); + sourceWithMockedRemoteCall("scroll_ok.json").doStartNextScroll("", timeValueMillis(0), r -> { + assertFalse(r.isTimedOut()); + assertEquals(FAKE_SCROLL_ID, r.getScrollId()); + assertEquals(4, r.getTotalHits()); + assertThat(r.getFailures(), empty()); + assertThat(r.getHits(), hasSize(1)); + assertEquals("test", r.getHits().get(0).getIndex()); + assertEquals("test", r.getHits().get(0).getType()); + assertEquals("AVToMiDL50DjIiBO3yKA", r.getHits().get(0).getId()); + assertEquals("{\"test\":\"test3\"}", r.getHits().get(0).getSource().utf8ToString()); + assertNull(r.getHits().get(0).getTTL()); + assertNull(r.getHits().get(0).getTimestamp()); + assertNull(r.getHits().get(0).getRouting()); + called.set(true); + }); + assertTrue(called.get()); + } + + /** + * Test for parsing _ttl, _timestamp, and _routing. + */ + public void testParseScrollFullyLoaded() throws Exception { + AtomicBoolean called = new AtomicBoolean(); + sourceWithMockedRemoteCall("scroll_fully_loaded.json").doStartNextScroll("", timeValueMillis(0), r -> { + assertEquals("AVToMiDL50DjIiBO3yKA", r.getHits().get(0).getId()); + assertEquals("{\"test\":\"test3\"}", r.getHits().get(0).getSource().utf8ToString()); + assertEquals((Long) 1234L, r.getHits().get(0).getTTL()); + assertEquals((Long) 123444L, r.getHits().get(0).getTimestamp()); + assertEquals("testrouting", r.getHits().get(0).getRouting()); + assertEquals("testparent", r.getHits().get(0).getParent()); + called.set(true); + }); + assertTrue(called.get()); + } + + /** + * Versions of Elasticsearch before 2.1.0 don't support sort:_doc and instead need to use search_type=scan. Scan doesn't return + * documents on the first iteration, which reindex doesn't expect, so we jump straight to the next iteration.
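+ * (The mocked responses below model this: start_scan.json returns an empty hit list, so the source immediately fetches the first real page from scroll_ok.json.)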
+ */ + public void testScanJumpStart() throws Exception { + AtomicBoolean called = new AtomicBoolean(); + sourceWithMockedRemoteCall("start_scan.json", "scroll_ok.json").doStart(r -> { + assertFalse(r.isTimedOut()); + assertEquals(FAKE_SCROLL_ID, r.getScrollId()); + assertEquals(4, r.getTotalHits()); + assertThat(r.getFailures(), empty()); + assertThat(r.getHits(), hasSize(1)); + assertEquals("test", r.getHits().get(0).getIndex()); + assertEquals("test", r.getHits().get(0).getType()); + assertEquals("AVToMiDL50DjIiBO3yKA", r.getHits().get(0).getId()); + assertEquals("{\"test\":\"test3\"}", r.getHits().get(0).getSource().utf8ToString()); + assertNull(r.getHits().get(0).getTTL()); + assertNull(r.getHits().get(0).getTimestamp()); + assertNull(r.getHits().get(0).getRouting()); + called.set(true); + }); + assertTrue(called.get()); + } + + public void testParseRejection() throws Exception { + // The rejection comes through in the handler because the mocked http response isn't marked as an error + AtomicBoolean called = new AtomicBoolean(); + // Handling a scroll rejection is the same as handling a search rejection so we reuse the verification code + Consumer checkResponse = r -> { + assertFalse(r.isTimedOut()); + assertEquals(FAKE_SCROLL_ID, r.getScrollId()); + assertEquals(4, r.getTotalHits()); + assertThat(r.getFailures(), hasSize(1)); + assertEquals("test", r.getFailures().get(0).getIndex()); + assertEquals((Integer) 0, r.getFailures().get(0).getShardId()); + assertEquals("87A7NvevQxSrEwMbtRCecg", r.getFailures().get(0).getNodeId()); + assertThat(r.getFailures().get(0).getReason(), instanceOf(EsRejectedExecutionException.class)); + assertEquals("rejected execution of org.elasticsearch.transport.TransportService$5@52d06af2 on " + + "EsThreadPoolExecutor[search, queue capacity = 1000, org.elasticsearch.common.util.concurrent." 
+ + "EsThreadPoolExecutor@778ea553[Running, pool size = 7, active threads = 7, queued tasks = 1000, " + + "completed tasks = 4182]]", r.getFailures().get(0).getReason().getMessage()); + assertThat(r.getHits(), hasSize(1)); + assertEquals("test", r.getHits().get(0).getIndex()); + assertEquals("test", r.getHits().get(0).getType()); + assertEquals("AVToMiC250DjIiBO3yJ_", r.getHits().get(0).getId()); + assertEquals("{\"test\":\"test1\"}", r.getHits().get(0).getSource().utf8ToString()); + called.set(true); + }; + sourceWithMockedRemoteCall("rejection.json").doStart(checkResponse); + assertTrue(called.get()); + called.set(false); + sourceWithMockedRemoteCall("rejection.json").doStartNextScroll("scroll", timeValueMillis(0), checkResponse); + assertTrue(called.get()); + } + + public void testParseFailureWithStatus() throws Exception { + // The rejection comes through in the handler because the mocked http response isn't marked as an error + AtomicBoolean called = new AtomicBoolean(); + // Handling a scroll rejection is the same as handling a search rejection so we reuse the verification code + Consumer checkResponse = r -> { + assertFalse(r.isTimedOut()); + assertEquals(FAKE_SCROLL_ID, r.getScrollId()); + assertEquals(10000, r.getTotalHits()); + assertThat(r.getFailures(), hasSize(1)); + assertEquals(null, r.getFailures().get(0).getIndex()); + assertEquals(null, r.getFailures().get(0).getShardId()); + assertEquals(null, r.getFailures().get(0).getNodeId()); + assertThat(r.getFailures().get(0).getReason(), instanceOf(RuntimeException.class)); + assertEquals("Unknown remote exception with reason=[SearchContextMissingException[No search context found for id [82]]]", + r.getFailures().get(0).getReason().getMessage()); + assertThat(r.getHits(), hasSize(1)); + assertEquals("test", r.getHits().get(0).getIndex()); + assertEquals("test", r.getHits().get(0).getType()); + assertEquals("10000", r.getHits().get(0).getId()); + assertEquals("{\"test\":\"test10000\"}", r.getHits().get(0).getSource().utf8ToString()); + called.set(true); + }; + sourceWithMockedRemoteCall("failure_with_status.json").doStart(checkResponse); + assertTrue(called.get()); + called.set(false); + sourceWithMockedRemoteCall("failure_with_status.json").doStartNextScroll("scroll", timeValueMillis(0), checkResponse); + assertTrue(called.get()); + } + + public void testParseRequestFailure() throws Exception { + AtomicBoolean called = new AtomicBoolean(); + Consumer checkResponse = r -> { + assertFalse(r.isTimedOut()); + assertNull(r.getScrollId()); + assertEquals(0, r.getTotalHits()); + assertThat(r.getFailures(), hasSize(1)); + assertThat(r.getFailures().get(0).getReason(), instanceOf(ParsingException.class)); + ParsingException failure = (ParsingException) r.getFailures().get(0).getReason(); + assertEquals("Unknown key for a VALUE_STRING in [invalid].", failure.getMessage()); + assertEquals(2, failure.getLineNumber()); + assertEquals(14, failure.getColumnNumber()); + called.set(true); + }; + sourceWithMockedRemoteCall("request_failure.json").doStart(checkResponse); + assertTrue(called.get()); + called.set(false); + sourceWithMockedRemoteCall("request_failure.json").doStartNextScroll("scroll", timeValueMillis(0), checkResponse); + assertTrue(called.get()); + } + + public void testRetryAndSucceed() throws Exception { + AtomicBoolean called = new AtomicBoolean(); + Consumer checkResponse = r -> { + assertThat(r.getFailures(), hasSize(0)); + called.set(true); + }; + retriesAllowed = between(1, Integer.MAX_VALUE); + 
sourceWithMockedRemoteCall("fail:rejection.json", "start_ok.json").doStart(checkResponse); + assertTrue(called.get()); + assertEquals(1, retries); + retries = 0; + called.set(false); + sourceWithMockedRemoteCall("fail:rejection.json", "scroll_ok.json").doStartNextScroll("scroll", timeValueMillis(0), + checkResponse); + assertTrue(called.get()); + assertEquals(1, retries); + } + + public void testRetryUntilYouRunOutOfTries() throws Exception { + AtomicBoolean called = new AtomicBoolean(); + Consumer checkResponse = r -> called.set(true); + retriesAllowed = between(0, 10); + String[] paths = new String[retriesAllowed + 2]; + for (int i = 0; i < retriesAllowed + 2; i++) { + paths[i] = "fail:rejection.json"; + } + RuntimeException e = expectThrows(RuntimeException.class, () -> sourceWithMockedRemoteCall(paths).doStart(checkResponse)); + assertEquals("failed", e.getMessage()); + assertFalse(called.get()); + assertEquals(retriesAllowed, retries); + retries = 0; + e = expectThrows(RuntimeException.class, + () -> sourceWithMockedRemoteCall(paths).doStartNextScroll("scroll", timeValueMillis(0), checkResponse)); + assertEquals("failed", e.getMessage()); + assertFalse(called.get()); + assertEquals(retriesAllowed, retries); + } + + private RemoteScrollableHitSource sourceWithMockedRemoteCall(String... paths) throws Exception { + return sourceWithMockedRemoteCall(true, paths); + } + + /** + * Creates a hit source that doesn't make the remote request and instead returns data from some files. Also requests are always returned + * synchronously rather than asynchronously. + */ + private RemoteScrollableHitSource sourceWithMockedRemoteCall(boolean mockRemoteVersion, String... paths) throws Exception { + URL[] resources = new URL[paths.length]; + for (int i = 0; i < paths.length; i++) { + resources[i] = Thread.currentThread().getContextClassLoader().getResource("responses/" + paths[i].replace("fail:", "")); + if (resources[i] == null) { + throw new IllegalArgumentException("Couldn't find [" + paths[i] + "]"); + } + } + RemoteScrollableHitSource.AsyncClient client = new RemoteScrollableHitSource.AsyncClient() { + int responseCount = 0; + @Override + public void performRequest(String method, String uri, Map params, HttpEntity entity, + ResponseListener listener) { + try { + URL resource = resources[responseCount]; + String path = paths[responseCount++]; + InputStream stream = resource.openStream(); + if (path.startsWith("fail:")) { + String body = Streams.copyToString(new InputStreamReader(entity.getContent(), StandardCharsets.UTF_8)); + if (path.equals("fail:rejection.json")) { + listener.onRetryableFailure(new RuntimeException(body)); + } else { + listener.onFailure(new RuntimeException(body)); + } + } else { + listener.onResponse(stream); + } + } catch (IOException e) { + listener.onFailure(e); + } + } + + @Override + public void close() throws IOException { + } + }; + TestRemoteScrollableHitSource hitSource = new TestRemoteScrollableHitSource(client) { + @Override + void lookupRemoteVersion(Consumer onVersion) { + if (mockRemoteVersion) { + onVersion.accept(Version.CURRENT); + } else { + super.lookupRemoteVersion(onVersion); + } + } + }; + if (mockRemoteVersion) { + hitSource.remoteVersion = Version.CURRENT; + } + return hitSource; + } + + private BackoffPolicy backoff() { + return BackoffPolicy.constantBackoff(timeValueMillis(0), retriesAllowed); + } + + private void countRetry() { + retries += 1; + } + + private void failRequest(Throwable t) { + throw new RuntimeException("failed", t); + } + + private 
class TestRemoteScrollableHitSource extends RemoteScrollableHitSource { + public TestRemoteScrollableHitSource(RemoteScrollableHitSource.AsyncClient client) { + super(RemoteScrollableHitSourceTests.this.logger, backoff(), RemoteScrollableHitSourceTests.this.threadPool, + RemoteScrollableHitSourceTests.this::countRetry, RemoteScrollableHitSourceTests.this::failRequest, client, + new BytesArray("{}"), RemoteScrollableHitSourceTests.this.searchRequest); + } + } +} diff --git a/modules/reindex/src/test/resources/responses/failure_with_status.json b/modules/reindex/src/test/resources/responses/failure_with_status.json new file mode 100644 index 00000000000..314de37a679 --- /dev/null +++ b/modules/reindex/src/test/resources/responses/failure_with_status.json @@ -0,0 +1,28 @@ +{ + "_scroll_id": "DnF1ZXJ5VGhlbkZldGNoBQAAAfakescroll", + "took": 3, + "timed_out": false, + "_shards": { + "total": 5, + "successful": 4, + "failed": 1, + "failures": [ { + "status": 404, + "reason": "SearchContextMissingException[No search context found for id [82]]" + } ] + }, + "hits": { + "total": 10000, + "max_score": 0.0, + "hits": [ { + "_index": "test", + "_type": "test", + "_id": "10000", + "_version": 1, + "_score": 0.0, + "_source": { + "test": "test10000" + } + } ] + } +} diff --git a/modules/reindex/src/test/resources/responses/main/0_20_5.json b/modules/reindex/src/test/resources/responses/main/0_20_5.json new file mode 100644 index 00000000000..58ca8e9428f --- /dev/null +++ b/modules/reindex/src/test/resources/responses/main/0_20_5.json @@ -0,0 +1,10 @@ +{ + "ok" : true, + "status" : 200, + "name" : "Techno", + "version" : { + "number" : "0.20.5", + "snapshot_build" : false + }, + "tagline" : "You Know, for Search" +} diff --git a/modules/reindex/src/test/resources/responses/main/0_90_13.json b/modules/reindex/src/test/resources/responses/main/0_90_13.json new file mode 100644 index 00000000000..1b104e068d9 --- /dev/null +++ b/modules/reindex/src/test/resources/responses/main/0_90_13.json @@ -0,0 +1,13 @@ +{ + "ok" : true, + "status" : 200, + "name" : "Mogul of the Mystic Mountain", + "version" : { + "number" : "0.90.13", + "build_hash" : "249c9c5e06765c9e929e92b1d235e1ba4dc679fa", + "build_timestamp" : "2014-03-25T15:27:12Z", + "build_snapshot" : false, + "lucene_version" : "4.6" + }, + "tagline" : "You Know, for Search" +} diff --git a/modules/reindex/src/test/resources/responses/main/1_7_5.json b/modules/reindex/src/test/resources/responses/main/1_7_5.json new file mode 100644 index 00000000000..0fe721defee --- /dev/null +++ b/modules/reindex/src/test/resources/responses/main/1_7_5.json @@ -0,0 +1,13 @@ +{ + "status" : 200, + "name" : "Robert Kelly", + "cluster_name" : "elasticsearch", + "version" : { + "number" : "1.7.5", + "build_hash" : "00f95f4ffca6de89d68b7ccaf80d148f1f70e4d4", + "build_timestamp" : "2016-02-02T09:55:30Z", + "build_snapshot" : false, + "lucene_version" : "4.10.4" + }, + "tagline" : "You Know, for Search" +} diff --git a/modules/reindex/src/test/resources/responses/main/2_3_3.json b/modules/reindex/src/test/resources/responses/main/2_3_3.json new file mode 100644 index 00000000000..8cd90b3b637 --- /dev/null +++ b/modules/reindex/src/test/resources/responses/main/2_3_3.json @@ -0,0 +1,12 @@ +{ + "name" : "Ezekiel Stane", + "cluster_name" : "elasticsearch", + "version" : { + "number" : "2.3.3", + "build_hash" : "218bdf10790eef486ff2c41a3df5cfa32dadcfde", + "build_timestamp" : "2016-05-17T15:40:04Z", + "build_snapshot" : false, + "lucene_version" : "5.5.0" + }, + "tagline" : "You Know, 
for Search" +} diff --git a/modules/reindex/src/test/resources/responses/main/5_0_0_alpha_3.json b/modules/reindex/src/test/resources/responses/main/5_0_0_alpha_3.json new file mode 100644 index 00000000000..6911f61c3e9 --- /dev/null +++ b/modules/reindex/src/test/resources/responses/main/5_0_0_alpha_3.json @@ -0,0 +1,12 @@ +{ + "name" : "Paibo", + "cluster_name" : "distribution_run", + "version" : { + "number" : "5.0.0-alpha3", + "build_hash" : "42e092f", + "build_date" : "2016-05-26T16:55:45.405Z", + "build_snapshot" : true, + "lucene_version" : "6.0.0" + }, + "tagline" : "You Know, for Search" +} diff --git a/modules/reindex/src/test/resources/responses/rejection.json b/modules/reindex/src/test/resources/responses/rejection.json new file mode 100644 index 00000000000..36120fbf888 --- /dev/null +++ b/modules/reindex/src/test/resources/responses/rejection.json @@ -0,0 +1,34 @@ +{ + "_scroll_id" : "DnF1ZXJ5VGhlbkZldGNoBQAAAfakescroll", + "took" : 6, + "timed_out" : false, + "_shards" : { + "total" : 5, + "successful" : 4, + "failed" : 1, + "failures" : [ { + "shard" : 0, + "index" : "test", + "node" : "87A7NvevQxSrEwMbtRCecg", + "reason" : { + "type" : "es_rejected_execution_exception", + "reason" : "rejected execution of org.elasticsearch.transport.TransportService$5@52d06af2 on EsThreadPoolExecutor[search, queue capacity = 1000, org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor@778ea553[Running, pool size = 7, active threads = 7, queued tasks = 1000, completed tasks = 4182]]" + } + } ] + }, + "hits" : { + "total" : 4, + "max_score" : null, + "hits" : [ { + "_index" : "test", + "_type" : "test", + "_id" : "AVToMiC250DjIiBO3yJ_", + "_version" : 1, + "_score" : null, + "_source" : { + "test" : "test1" + }, + "sort" : [ 0 ] + } ] + } +} diff --git a/modules/reindex/src/test/resources/responses/request_failure.json b/modules/reindex/src/test/resources/responses/request_failure.json new file mode 100644 index 00000000000..6f6de78c060 --- /dev/null +++ b/modules/reindex/src/test/resources/responses/request_failure.json @@ -0,0 +1,15 @@ +{ + "error" : { + "root_cause" : [ { + "type" : "parsing_exception", + "reason" : "Unknown key for a VALUE_STRING in [invalid].", + "line" : 2, + "col" : 14 + } ], + "type" : "parsing_exception", + "reason" : "Unknown key for a VALUE_STRING in [invalid].", + "line" : 2, + "col" : 14 + }, + "status" : 400 +} diff --git a/modules/reindex/src/test/resources/responses/scroll_fully_loaded.json b/modules/reindex/src/test/resources/responses/scroll_fully_loaded.json new file mode 100644 index 00000000000..a2c1be34e5c --- /dev/null +++ b/modules/reindex/src/test/resources/responses/scroll_fully_loaded.json @@ -0,0 +1,30 @@ +{ + "_scroll_id" : "DnF1ZXJ5VGhlbkZldGNoBQAAAfakescroll", + "took" : 3, + "timed_out" : false, + "terminated_early" : true, + "_shards" : { + "total" : 5, + "successful" : 5, + "failed" : 0 + }, + "hits" : { + "total" : 4, + "max_score" : null, + "hits" : [ { + "_index" : "test", + "_type" : "test", + "_id" : "AVToMiDL50DjIiBO3yKA", + "_version" : 1, + "_score" : null, + "_source" : { + "test" : "test3" + }, + "sort" : [ 0 ], + "_routing": "testrouting", + "_parent": "testparent", + "_ttl" : 1234, + "_timestamp": 123444 + } ] + } +} diff --git a/modules/reindex/src/test/resources/responses/scroll_ok.json b/modules/reindex/src/test/resources/responses/scroll_ok.json new file mode 100644 index 00000000000..5cdc4a400cb --- /dev/null +++ b/modules/reindex/src/test/resources/responses/scroll_ok.json @@ -0,0 +1,26 @@ +{ + "_scroll_id" : 
"DnF1ZXJ5VGhlbkZldGNoBQAAAfakescroll", + "took" : 3, + "timed_out" : false, + "terminated_early" : true, + "_shards" : { + "total" : 5, + "successful" : 5, + "failed" : 0 + }, + "hits" : { + "total" : 4, + "max_score" : null, + "hits" : [ { + "_index" : "test", + "_type" : "test", + "_id" : "AVToMiDL50DjIiBO3yKA", + "_version" : 1, + "_score" : null, + "_source" : { + "test" : "test3" + }, + "sort" : [ 0 ] + } ] + } +} diff --git a/modules/reindex/src/test/resources/responses/start_ok.json b/modules/reindex/src/test/resources/responses/start_ok.json new file mode 100644 index 00000000000..a2988341f8c --- /dev/null +++ b/modules/reindex/src/test/resources/responses/start_ok.json @@ -0,0 +1,25 @@ +{ + "_scroll_id" : "DnF1ZXJ5VGhlbkZldGNoBQAAAfakescroll", + "took" : 6, + "timed_out" : false, + "_shards" : { + "total" : 5, + "successful" : 5, + "failed" : 0 + }, + "hits" : { + "total" : 4, + "max_score" : null, + "hits" : [ { + "_index" : "test", + "_type" : "test", + "_id" : "AVToMiC250DjIiBO3yJ_", + "_version" : 1, + "_score" : null, + "_source" : { + "test" : "test2" + }, + "sort" : [ 0 ] + } ] + } +} diff --git a/modules/reindex/src/test/resources/responses/start_scan.json b/modules/reindex/src/test/resources/responses/start_scan.json new file mode 100644 index 00000000000..5576d708b30 --- /dev/null +++ b/modules/reindex/src/test/resources/responses/start_scan.json @@ -0,0 +1,15 @@ +{ + "_scroll_id" : "c2Nhbjs1OzQ0Ojd5aUZoUm5hU2lDZ3ZvUHMzMXdGQ1E7NDU6N3lpRmhSbmFTaUNndm9QczMxd0ZDUTs0Mjo3eWlGaFJuYVNpQ2d2b1BzMzF3RkNROzQzOjd5aUZoUm5hU2lDZ3ZvUHMzMXdGQ1E7NDE6N3lpRmhSbmFTaUNndm9QczMxd0ZDUTsxO3RvdGFsX2hpdHM6MTAwMDA7", + "took" : 13, + "timed_out" : false, + "_shards" : { + "total" : 5, + "successful" : 5, + "failed" : 0 + }, + "hits" : { + "total" : 10000, + "max_score" : 0.0, + "hits" : [ ] + } +} diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/10_basic.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/10_basic.yaml index 041aa127cd4..e5bf6368eab 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/10_basic.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/10_basic.yaml @@ -278,9 +278,6 @@ body: settings: number_of_shards: 1 - - do: - cluster.health: - wait_for_status: yellow - do: index: index: test diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/20_validation.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/20_validation.yaml index 8648c9034ee..8833c844b22 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/20_validation.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/20_validation.yaml @@ -59,16 +59,16 @@ --- "source fields may not be modified": - do: - catch: /fields is not supported in this context/ + catch: /stored_fields is not supported in this context/ delete_by_query: index: test body: - fields: [_id] + stored_fields: [_id] --- "requests_per_second cannot be an empty string": - do: - catch: /\[requests_per_second\] must be a float greater than 0. Use "unlimited" to disable throttling./ + catch: /\[requests_per_second\] must be a float greater than 0. Use -1 to disable throttling./ delete_by_query: requests_per_second: "" index: test @@ -79,7 +79,7 @@ --- "requests_per_second cannot be negative": - do: - catch: /\[requests_per_second\] must be a float greater than 0. 
Use "unlimited" to disable throttling./ + catch: /\[requests_per_second\] must be a float greater than 0. Use -1 to disable throttling./ delete_by_query: requests_per_second: -12 index: test @@ -90,10 +90,22 @@ --- "requests_per_second cannot be zero": - do: - catch: /\[requests_per_second\] must be a float greater than 0. Use "unlimited" to disable throttling./ + catch: /\[requests_per_second\] must be a float greater than 0. Use -1 to disable throttling./ delete_by_query: requests_per_second: 0 index: test body: query: match_all: {} + + +--- +"requests_per_second cannot be unlimited": + - do: + catch: /\[requests_per_second\] must be a float greater than 0. Use -1 to disable throttling./ + delete_by_query: + requests_per_second: unlimited + index: test + body: + query: + match_all: {} diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/40_versioning.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/40_versioning.yaml new file mode 100644 index 00000000000..c81305e2824 --- /dev/null +++ b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/40_versioning.yaml @@ -0,0 +1,29 @@ +--- +"delete_by_query fails to delete documents with version number equal to zero": + - do: + index: + index: index1 + type: type1 + id: 1 + version: 0 # Starting version is zero + version_type: external + body: {"delete": 0} + - do: + indices.refresh: {} + + # Delete by query uses internal versioning and will fail here + # because zero is not allowed as a valid version number + - do: + catch: /illegal version value \[0\] for version type \[INTERNAL\]./ + delete_by_query: + index: index1 + refresh: true + body: + query: + match_all: {} + - do: + get: + index: index1 + type: type1 + id: 1 + - match: {_version: 0} diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/50_consistency.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/50_consistency.yaml index a69dfdfac04..bc594dce296 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/50_consistency.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/50_consistency.yaml @@ -6,9 +6,6 @@ body: settings: number_of_replicas: 5 - - do: - cluster.health: - wait_for_status: yellow - do: index: index: test diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/70_throttle.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/70_throttle.yaml index 96cfaa42b5a..14b4ae99eab 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/70_throttle.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/delete_by_query/70_throttle.yaml @@ -7,9 +7,6 @@ body: settings: number_of_shards: 1 - - do: - cluster.health: - wait_for_status: yellow - do: index: index: test @@ -43,16 +40,13 @@ - lt: {throttled_millis: 4000} --- -"requests_per_second supports unlimited which turns off throttling": +"requests_per_second supports -1 which turns off throttling": - do: indices.create: index: test body: settings: number_of_shards: 1 - - do: - cluster.health: - wait_for_status: yellow - do: index: index: test @@ -75,7 +69,7 @@ delete_by_query: index: test scroll_size: 1 - requests_per_second: unlimited + requests_per_second: -1 body: query: match_all: {} @@ -85,7 +79,7 @@ - match: {throttled_millis: 0} --- -"Rethrottle": +"Rethrottle to -1 which turns off throttling": # Throttling happens between each scroll batch so we need to 
control the size of the batch by using a single shard # and a small batch size on the request - do: @@ -94,9 +88,6 @@ body: settings: number_of_shards: 1 - - do: - cluster.health: - wait_for_status: yellow - do: index: index: test @@ -130,7 +121,7 @@ - do: reindex.rethrottle: - requests_per_second: unlimited + requests_per_second: -1 task_id: $task - do: @@ -157,9 +148,6 @@ body: settings: number_of_shards: 1 - - do: - cluster.health: - wait_for_status: yellow - do: index: index: test diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/20_validation.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/20_validation.yaml index a7a5198e430..cab92310dbd 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/20_validation.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/20_validation.yaml @@ -152,7 +152,7 @@ --- "requests_per_second cannot be an empty string": - do: - catch: /\[requests_per_second\] must be a float greater than 0. Use "unlimited" to disable throttling./ + catch: /\[requests_per_second\] must be a float greater than 0. Use -1 to disable throttling./ reindex: requests_per_second: "" body: @@ -164,7 +164,7 @@ --- "requests_per_second cannot be negative": - do: - catch: /\[requests_per_second\] must be a float greater than 0. Use "unlimited" to disable throttling./ + catch: /\[requests_per_second\] must be a float greater than 0. Use -1 to disable throttling./ reindex: requests_per_second: -12 body: @@ -176,7 +176,7 @@ --- "requests_per_second cannot be zero": - do: - catch: /\[requests_per_second\] must be a float greater than 0. Use "unlimited" to disable throttling./ + catch: /\[requests_per_second\] must be a float greater than 0. Use -1 to disable throttling./ reindex: requests_per_second: 0 body: @@ -185,6 +185,18 @@ dest: index: dest +--- +"requests_per_second cannot be unlimited": + - do: + catch: /\[requests_per_second\] must be a float greater than 0. Use -1 to disable throttling./ + reindex: + requests_per_second: unlimited + body: + source: + index: test + dest: + index: dest + --- "reindex without source gives useful error message": - do: @@ -216,11 +228,54 @@ --- "source fields may not be modified": - do: - catch: /fields is not supported in this context/ + catch: /stored_fields is not supported in this context/ reindex: body: source: index: test - fields: [_id] + stored_fields: [_id] + dest: + index: dest + +--- +"unwhitelisted remote host fails": + - do: + catch: /\[badremote:9200\] not whitelisted in reindex.remote.whitelist/ + reindex: + body: + source: + remote: + host: http://badremote:9200 + index: test + dest: + index: dest + +--- +"badly formatted remote host fails": + - do: + catch: /\[host\] must be of the form \[scheme\].//\[host\].\[port\]/ + reindex: + body: + source: + remote: + host: badremote + weird: stuff + badkey: is bad + index: test + dest: + index: dest + +--- +"junk in remote fails": + - do: + catch: /Unsupported fields in \[remote\]. 
\[weird,badkey\]/ + reindex: + body: + source: + remote: + host: http://okremote:9200 + weird: stuff + badkey: is bad + index: test dest: index: dest diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/60_consistency.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/60_consistency.yaml index 323e51b1149..54e6e6df0df 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/60_consistency.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/60_consistency.yaml @@ -6,9 +6,6 @@ body: settings: number_of_replicas: 5 - - do: - cluster.health: - wait_for_status: yellow - do: index: index: src diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/70_throttle.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/70_throttle.yaml index 05d7668ed2e..74ff3f6f615 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/70_throttle.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/70_throttle.yaml @@ -9,9 +9,6 @@ settings: number_of_shards: "1" number_of_replicas: "0" - - do: - cluster.health: - wait_for_status: yellow - do: index: index: source @@ -53,7 +50,7 @@ - is_false: task --- -"requests_per_second supports unlimited to turn off throttling": +"requests_per_second supports -1 to turn off throttling": - do: indices.create: index: source @@ -61,9 +58,6 @@ settings: number_of_shards: "1" number_of_replicas: "0" - - do: - cluster.health: - wait_for_status: yellow - do: index: index: source @@ -87,7 +81,7 @@ - do: reindex: - requests_per_second: unlimited + requests_per_second: -1 body: source: index: source @@ -103,7 +97,7 @@ - is_false: task --- -"Rethrottle": +"Rethrottle to -1 which turns off throttling": # Throttling happens between each scroll batch so we need to control the size of the batch by using a single shard # and a small batch size on the request - do: @@ -113,9 +107,6 @@ settings: number_of_shards: "1" number_of_replicas: "0" - - do: - cluster.health: - wait_for_status: yellow - do: index: index: source @@ -152,7 +143,7 @@ - do: reindex.rethrottle: - requests_per_second: unlimited + requests_per_second: -1 task_id: $task - do: @@ -171,9 +162,6 @@ settings: number_of_shards: "1" number_of_replicas: "0" - - do: - cluster.health: - wait_for_status: yellow - do: index: index: source diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/90_remote.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/90_remote.yaml new file mode 100644 index 00000000000..6adac98ad77 --- /dev/null +++ b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/90_remote.yaml @@ -0,0 +1,207 @@ +--- +"Basic reindex from remote": + - do: + index: + index: source + type: foo + id: 1 + body: { "text": "test" } + refresh: true + + # Fetch the http host. We use the host of the master because we know there will always be a master. 
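Before the remote smoke tests above continue, it is worth spelling out the contract that the three new failure-mode tests in reindex/20_validation.yaml pin down: a remote host must be whitelisted via reindex.remote.whitelist, must look like [scheme]://[host]:[port], and must not carry unknown keys. A minimal sketch of checks that would satisfy those tests follows; the class and method names are hypothetical, not the reindex module's actual implementation:

    import java.util.Set;

    // Hypothetical sketch only: mirrors the error messages asserted by the
    // "unwhitelisted remote host", "badly formatted remote host", and
    // "junk in remote" tests above.
    class RemoteHostChecksSketch {
        static void checkRemoteWhitelist(Set<String> whitelist, String host, int port) {
            String hostAndPort = host + ":" + port;
            if (whitelist.contains(hostAndPort) == false) {
                throw new IllegalArgumentException(
                        "[" + hostAndPort + "] not whitelisted in reindex.remote.whitelist");
            }
        }

        static void checkHostFormat(String host) {
            // scheme, host, and numeric port, e.g. http://otherhost:9200
            if (host.matches("[a-zA-Z][a-zA-Z0-9+.-]*://[^:/]+:\\d+") == false) {
                throw new IllegalArgumentException(
                        "[host] must be of the form [scheme]://[host]:[port]");
            }
        }
    }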
+ - do: + cluster.state: {} + - set: { master_node: master } + - do: + nodes.info: + metric: [ http ] + - is_true: nodes.$master.http.publish_address + - set: {nodes.$master.http.publish_address: host} + - do: + reindex: + refresh: true + body: + source: + remote: + host: http://${host} + index: source + dest: + index: dest + - match: {created: 1} + - match: {updated: 0} + - match: {version_conflicts: 0} + - match: {batches: 1} + - match: {failures: []} + - match: {throttled_millis: 0} + - gte: { took: 0 } + - is_false: task + - is_false: deleted + + - do: + search: + index: dest + body: + query: + match: + text: test + - match: {hits.total: 1} + +--- +"Reindex from remote with query": + - do: + index: + index: source + type: foo + id: 1 + body: { "text": "test" } + - do: + index: + index: source + type: foo + id: 2 + body: { "text": "test2" } + - do: + indices.refresh: {} + + # Fetch the http host. We use the host of the master because we know there will always be a master. + - do: + cluster.state: {} + - set: { master_node: master } + - do: + nodes.info: + metric: [ http ] + - is_true: nodes.$master.http.publish_address + - set: {nodes.$master.http.publish_address: host} + - do: + reindex: + refresh: true + body: + source: + remote: + host: http://${host} + index: source + query: + match: + text: test2 + dest: + index: dest + - match: {created: 1} + + - do: + search: + index: dest + body: + query: + match_all: {} + - match: {hits.total: 1} + +--- +"Reindex from remote with routing": + - do: + index: + index: source + type: foo + id: 1 + body: { "text": "test" } + routing: foo + refresh: true + + # Fetch the http host. We use the host of the master because we know there will always be a master. + - do: + cluster.state: {} + - set: { master_node: master } + - do: + nodes.info: + metric: [ http ] + - is_true: nodes.$master.http.publish_address + - set: {nodes.$master.http.publish_address: host} + - do: + reindex: + refresh: true + body: + source: + remote: + host: http://${host} + index: source + dest: + index: dest + - match: {created: 1} + + - do: + search: + index: dest + routing: foo + body: + query: + match: + text: test + - match: {hits.total: 1} + +--- +"Reindex from remote with parent/child": + - do: + indices.create: + index: source + body: + mappings: + foo: {} + bar: + _parent: + type: foo + - do: + indices.create: + index: dest + body: + mappings: + foo: {} + bar: + _parent: + type: foo + - do: + index: + index: source + type: foo + id: 1 + body: { "text": "test" } + - do: + index: + index: source + type: bar + id: 1 + parent: 1 + body: { "text": "test2" } + - do: + indices.refresh: {} + + # Fetch the http host. We use the host of the master because we know there will always be a master. 
+ - do: + cluster.state: {} + - set: { master_node: master } + - do: + nodes.info: + metric: [ http ] + - is_true: nodes.$master.http.publish_address + - set: {nodes.$master.http.publish_address: host} + - do: + reindex: + refresh: true + body: + source: + remote: + host: http://${host} + index: source + dest: + index: dest + - match: {created: 2} + + - do: + search: + index: dest + body: + query: + has_parent: + parent_type: foo + query: + match: + text: test + - match: {hits.total: 1} diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/10_basic.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/10_basic.yaml index 17b2dc77816..784623f714c 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/10_basic.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/10_basic.yaml @@ -217,9 +217,6 @@ body: settings: number_of_shards: 1 - - do: - cluster.health: - wait_for_status: yellow - do: index: index: test diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/20_validation.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/20_validation.yaml index ea487eb54e0..54eb262e9ba 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/20_validation.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/20_validation.yaml @@ -67,16 +67,16 @@ --- "source fields may not be modified": - do: - catch: /fields is not supported in this context/ + catch: /stored_fields is not supported in this context/ update_by_query: index: test body: - fields: [_id] + stored_fields: [_id] --- "requests_per_second cannot be an empty string": - do: - catch: /\[requests_per_second\] must be a float greater than 0. Use "unlimited" to disable throttling./ + catch: /\[requests_per_second\] must be a float greater than 0. Use -1 to disable throttling./ update_by_query: requests_per_second: "" index: test @@ -84,7 +84,7 @@ --- "requests_per_second cannot be negative": - do: - catch: /\[requests_per_second\] must be a float greater than 0. Use "unlimited" to disable throttling./ + catch: /\[requests_per_second\] must be a float greater than 0. Use -1 to disable throttling./ update_by_query: requests_per_second: -12 index: test @@ -92,7 +92,15 @@ --- "requests_per_second cannot be zero": - do: - catch: /\[requests_per_second\] must be a float greater than 0. Use "unlimited" to disable throttling./ + catch: /\[requests_per_second\] must be a float greater than 0. Use -1 to disable throttling./ + update_by_query: + requests_per_second: 0 + index: test + +--- +"requests_per_second cannot be unlimited": + - do: + catch: /\[requests_per_second\] must be a float greater than 0. 
Use -1 to disable throttling./ update_by_query: requests_per_second: unlimited index: test diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/30_new_fields.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/30_new_fields.yaml index fc6141cae53..fa2fcae4fa8 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/30_new_fields.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/30_new_fields.yaml @@ -9,9 +9,6 @@ properties: name: type: text - - do: - cluster.health: - wait_for_status: yellow - do: index: index: test diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/40_versioning.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/40_versioning.yaml index ac1cbe4417e..1718714defd 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/40_versioning.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/40_versioning.yaml @@ -21,3 +21,30 @@ type: test id: 1 - match: {_version: 2} + +--- +"update_by_query fails to update documents with version number equal to zero": + - do: + index: + index: index1 + type: type1 + id: 1 + version: 0 # Starting version is zero + version_type: external + body: {"update": 0} + - do: + indices.refresh: {} + + # Update by query uses internal versioning and will fail here + # because zero is not allowed as a valid version number + - do: + catch: /illegal version value \[0\] for version type \[INTERNAL\]./ + update_by_query: + index: index1 + refresh: true + - do: + get: + index: index1 + type: type1 + id: 1 + - match: {_version: 0} diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/50_consistency.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/50_consistency.yaml index 96c1e70c89f..591815fb5c4 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/50_consistency.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/50_consistency.yaml @@ -6,9 +6,6 @@ body: settings: number_of_replicas: 5 - - do: - cluster.health: - wait_for_status: yellow - do: index: index: test diff --git a/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/60_throttle.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/60_throttle.yaml index 7ecf7000bfd..59ca0976da2 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/60_throttle.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/update_by_query/60_throttle.yaml @@ -7,9 +7,6 @@ body: settings: number_of_shards: 1 - - do: - cluster.health: - wait_for_status: yellow - do: index: index: test @@ -39,16 +36,13 @@ - lt: {throttled_millis: 4000} --- -"requests_per_second supports unlimited which turns off throttling": +"requests_per_second supports -1 which turns off throttling": - do: indices.create: index: test body: settings: number_of_shards: 1 - - do: - cluster.health: - wait_for_status: yellow - do: index: index: test @@ -71,13 +65,13 @@ update_by_query: index: test scroll_size: 1 - requests_per_second: unlimited + requests_per_second: -1 - match: {batches: 3} - match: {updated: 3} - match: {throttled_millis: 0} --- -"Rethrottle": +"Rethrottle to -1 which turns off throttling": # Throttling happens between each scroll batch so we need to control the size of the batch by using a single shard # and a small batch size on the request - do:
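The requests_per_second hunks above (for delete_by_query, reindex, and update_by_query alike) all pin down one parsing rule: the value must be a float strictly greater than zero, -1 now means "disable throttling", and the old "unlimited" keyword is rejected like any other non-numeric string. A minimal sketch of that rule, using a hypothetical helper rather than the reindex module's actual parser, reproduces every case the tests assert (empty string, -12, 0, and unlimited all fail; -1 disables throttling):

    // Hypothetical sketch, not the module's real code: parse requests_per_second
    // under the rule the tests above demand.
    class RequestsPerSecondSketch {
        static float parseRequestsPerSecond(String value) {
            float requestsPerSecond;
            try {
                requestsPerSecond = Float.parseFloat(value);
            } catch (NumberFormatException e) {
                // also rejects "" and the removed "unlimited" keyword
                throw new IllegalArgumentException(
                        "[requests_per_second] must be a float greater than 0. Use -1 to disable throttling.", e);
            }
            if (requestsPerSecond == -1f) {
                return Float.POSITIVE_INFINITY; // -1 replaces "unlimited": no throttling
            }
            if (requestsPerSecond <= 0f) {
                throw new IllegalArgumentException(
                        "[requests_per_second] must be a float greater than 0. Use -1 to disable throttling.");
            }
            return requestsPerSecond;
        }
    }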
@@ -86,9 +80,6 @@ body: settings: number_of_shards: 1 - - do: - cluster.health: - wait_for_status: yellow - do: index: index: test @@ -118,7 +109,7 @@ - do: reindex.rethrottle: - requests_per_second: unlimited + requests_per_second: -1 task_id: $task - do: @@ -136,9 +127,6 @@ body: settings: number_of_shards: 1 - - do: - cluster.health: - wait_for_status: yellow - do: index: index: test diff --git a/modules/transport-netty3/build.gradle b/modules/transport-netty3/build.gradle new file mode 100644 index 00000000000..7b8b94a2666 --- /dev/null +++ b/modules/transport-netty3/build.gradle @@ -0,0 +1,118 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/* + TODOs: + * fix permissions such that only netty3 can open sockets etc? + * fix the hack in the build framework that copies transport-netty3 into the integ test cluster + * maybe figure out a way to run all tests from core with netty3/network? + */ +esplugin { + description 'Netty 3 based transport implementation' + classname 'org.elasticsearch.transport.Netty3Plugin' + hasClientJar = true +} + +compileTestJava.options.compilerArgs << "-Xlint:-cast,-deprecation,-rawtypes,-try,-unchecked" + +dependencies { + // network stack + compile 'io.netty:netty:3.10.6.Final' +} + +thirdPartyAudit.excludes = [ + // uses internal java api: sun.security.x509 (X509CertInfo, X509CertImpl, X500Name) + 'org.jboss.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator', + // classes are missing + + // from org.jboss.netty.handler.codec.protobuf.ProtobufVarint32FrameDecoder (netty) + 'com.google.protobuf.CodedInputStream', + + // from org.jboss.netty.handler.codec.protobuf.ProtobufVarint32LengthFieldPrepender (netty) + 'com.google.protobuf.CodedOutputStream', + + // from org.jboss.netty.handler.codec.protobuf.ProtobufDecoder (netty) + 'com.google.protobuf.ExtensionRegistry', + 'com.google.protobuf.MessageLite$Builder', + 'com.google.protobuf.MessageLite', + 'com.google.protobuf.Parser', + + // from org.jboss.netty.channel.socket.http.HttpTunnelingServlet (netty) + 'javax.servlet.ServletConfig', + 'javax.servlet.ServletException', + 'javax.servlet.ServletOutputStream', + 'javax.servlet.http.HttpServlet', + 'javax.servlet.http.HttpServletRequest', + 'javax.servlet.http.HttpServletResponse', + + // from org.jboss.netty.logging.CommonsLoggerFactory (netty) + 'org.apache.commons.logging.Log', + 'org.apache.commons.logging.LogFactory', + + // from org.jboss.netty.handler.ssl.OpenSslEngine (netty) + 'org.apache.tomcat.jni.Buffer', + 'org.apache.tomcat.jni.Library', + 'org.apache.tomcat.jni.Pool', + 'org.apache.tomcat.jni.SSL', + 'org.apache.tomcat.jni.SSLContext', + + // from org.jboss.netty.handler.ssl.util.BouncyCastleSelfSignedCertGenerator (netty) + 'org.bouncycastle.asn1.x500.X500Name', + 
'org.bouncycastle.cert.X509v3CertificateBuilder', + 'org.bouncycastle.cert.jcajce.JcaX509CertificateConverter', + 'org.bouncycastle.cert.jcajce.JcaX509v3CertificateBuilder', + 'org.bouncycastle.jce.provider.BouncyCastleProvider', + 'org.bouncycastle.operator.jcajce.JcaContentSignerBuilder', + + // from org.jboss.netty.handler.ssl.JettyNpnSslEngine (netty) + 'org.eclipse.jetty.npn.NextProtoNego$ClientProvider', + 'org.eclipse.jetty.npn.NextProtoNego$ServerProvider', + 'org.eclipse.jetty.npn.NextProtoNego', + + // from org.jboss.netty.logging.JBossLoggerFactory (netty) + 'org.jboss.logging.Logger', + + // from org.jboss.netty.handler.codec.marshalling.ChannelBufferByteInput (netty) + 'org.jboss.marshalling.ByteInput', + + // from org.jboss.netty.handler.codec.marshalling.ChannelBufferByteOutput (netty) + 'org.jboss.marshalling.ByteOutput', + + // from org.jboss.netty.handler.codec.marshalling.CompatibleMarshallingEncoder (netty) + 'org.jboss.marshalling.Marshaller', + + // from org.jboss.netty.handler.codec.marshalling.ContextBoundUnmarshallerProvider (netty) + 'org.jboss.marshalling.MarshallerFactory', + 'org.jboss.marshalling.MarshallingConfiguration', + 'org.jboss.marshalling.Unmarshaller', + + // from org.jboss.netty.container.osgi.NettyBundleActivator (netty) + 'org.osgi.framework.BundleActivator', + 'org.osgi.framework.BundleContext', + + // from org.jboss.netty.logging.OsgiLoggerFactory$1 (netty) + 'org.osgi.framework.ServiceReference', + 'org.osgi.service.log.LogService', + 'org.osgi.util.tracker.ServiceTracker', + 'org.osgi.util.tracker.ServiceTrackerCustomizer', + + // from org.jboss.netty.util.internal.logging.InternalLoggerFactory (netty) - it's optional + 'org.slf4j.Logger', + 'org.slf4j.LoggerFactory', +] diff --git a/modules/transport-netty3/licenses/netty-3.10.6.Final.jar.sha1 b/modules/transport-netty3/licenses/netty-3.10.6.Final.jar.sha1 new file mode 100644 index 00000000000..35872846658 --- /dev/null +++ b/modules/transport-netty3/licenses/netty-3.10.6.Final.jar.sha1 @@ -0,0 +1 @@ +18ed04a0e502896552854926e908509db2987a00 \ No newline at end of file diff --git a/modules/transport-netty3/licenses/netty-LICENSE.txt b/modules/transport-netty3/licenses/netty-LICENSE.txt new file mode 100644 index 00000000000..d6456956733 --- /dev/null +++ b/modules/transport-netty3/licenses/netty-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/distribution/licenses/netty-NOTICE.txt b/modules/transport-netty3/licenses/netty-NOTICE.txt similarity index 99% rename from distribution/licenses/netty-NOTICE.txt rename to modules/transport-netty3/licenses/netty-NOTICE.txt index 5b2e21acea7..5bbf91a14de 100644 --- a/distribution/licenses/netty-NOTICE.txt +++ b/modules/transport-netty3/licenses/netty-NOTICE.txt @@ -114,4 +114,3 @@ framework implementation, which can be obtained at: * license/LICENSE.felix.txt (Apache License 2.0) * HOMEPAGE: * http://felix.apache.org/ - diff --git a/core/src/main/java/org/elasticsearch/http/netty/ESHttpResponseEncoder.java b/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/ESNetty3HttpResponseEncoder.java similarity index 83% rename from core/src/main/java/org/elasticsearch/http/netty/ESHttpResponseEncoder.java rename to modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/ESNetty3HttpResponseEncoder.java index afa69a2fe02..ecc4e5fb1e0 100644 --- a/core/src/main/java/org/elasticsearch/http/netty/ESHttpResponseEncoder.java +++ b/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/ESNetty3HttpResponseEncoder.java @@ -17,9 +17,9 @@ * under the License. */ -package org.elasticsearch.http.netty; +package org.elasticsearch.http.netty3; -import org.elasticsearch.transport.netty.NettyUtils; +import org.elasticsearch.transport.netty3.Netty3Utils; import org.jboss.netty.buffer.ChannelBuffer; import org.jboss.netty.buffer.ChannelBuffers; import org.jboss.netty.buffer.CompositeChannelBuffer; @@ -32,18 +32,18 @@ import java.util.List; /** * Wraps a netty {@link HttpResponseEncoder} and makes sure that if the resulting * channel buffer is composite, it will use the correct gathering flag. See more - * at {@link NettyUtils#DEFAULT_GATHERING}. + * at {@link Netty3Utils#DEFAULT_GATHERING}. */ -public class ESHttpResponseEncoder extends HttpResponseEncoder { +public class ESNetty3HttpResponseEncoder extends HttpResponseEncoder { @Override protected Object encode(ChannelHandlerContext ctx, Channel channel, Object msg) throws Exception { Object retVal = super.encode(ctx, channel, msg); if (retVal instanceof CompositeChannelBuffer) { CompositeChannelBuffer ccb = (CompositeChannelBuffer) retVal; - if (ccb.useGathering() != NettyUtils.DEFAULT_GATHERING) { + if (ccb.useGathering() != Netty3Utils.DEFAULT_GATHERING) { List decompose = ccb.decompose(ccb.readerIndex(), ccb.readableBytes()); - return ChannelBuffers.wrappedBuffer(NettyUtils.DEFAULT_GATHERING, + return ChannelBuffers.wrappedBuffer(Netty3Utils.DEFAULT_GATHERING, decompose.toArray(new ChannelBuffer[decompose.size()])); } } diff --git a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpChannel.java b/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/Netty3HttpChannel.java similarity index 90% rename from core/src/main/java/org/elasticsearch/http/netty/NettyHttpChannel.java rename to modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/Netty3HttpChannel.java index c4253df2860..a715abfd877 100644 --- a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpChannel.java +++ b/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/Netty3HttpChannel.java @@ -17,17 +17,18 @@ * under the License. 
*/ -package org.elasticsearch.http.netty; +package org.elasticsearch.http.netty3; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.ReleasableBytesStreamOutput; import org.elasticsearch.common.lease.Releasable; -import org.elasticsearch.transport.netty.NettyUtils; -import org.elasticsearch.http.netty.cors.CorsHandler; -import org.elasticsearch.http.netty.pipelining.OrderedDownstreamChannelEvent; -import org.elasticsearch.http.netty.pipelining.OrderedUpstreamMessageEvent; +import org.elasticsearch.transport.netty3.Netty3Utils; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.http.netty3.cors.Netty3CorsHandler; +import org.elasticsearch.http.netty3.pipelining.OrderedDownstreamChannelEvent; +import org.elasticsearch.http.netty3.pipelining.OrderedUpstreamMessageEvent; import org.elasticsearch.rest.AbstractRestChannel; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.RestStatus; @@ -54,28 +55,30 @@ import static org.jboss.netty.handler.codec.http.HttpHeaders.Names.CONNECTION; import static org.jboss.netty.handler.codec.http.HttpHeaders.Values.CLOSE; import static org.jboss.netty.handler.codec.http.HttpHeaders.Values.KEEP_ALIVE; -public final class NettyHttpChannel extends AbstractRestChannel { +public final class Netty3HttpChannel extends AbstractRestChannel { - private final NettyHttpServerTransport transport; + private final Netty3HttpServerTransport transport; private final Channel channel; private final org.jboss.netty.handler.codec.http.HttpRequest nettyRequest; private final OrderedUpstreamMessageEvent orderedUpstreamMessageEvent; + private final ThreadContext threadContext; /** - * @param transport The corresponding NettyHttpServerTransport where this channel belongs to. + * @param transport The corresponding Netty3HttpServerTransport to which this channel belongs. * @param request The request that is handled by this channel. * @param orderedUpstreamMessageEvent If HTTP pipelining is enabled, provide the corresponding Netty upstream event. May be null if * HTTP pipelining is disabled. * @param detailedErrorsEnabled true iff error messages should include stack traces.
*/ - public NettyHttpChannel(NettyHttpServerTransport transport, NettyHttpRequest request, - @Nullable OrderedUpstreamMessageEvent orderedUpstreamMessageEvent, - boolean detailedErrorsEnabled) { + public Netty3HttpChannel(Netty3HttpServerTransport transport, Netty3HttpRequest request, + @Nullable OrderedUpstreamMessageEvent orderedUpstreamMessageEvent, + boolean detailedErrorsEnabled, ThreadContext threadContext) { super(request, detailedErrorsEnabled); this.transport = transport; this.channel = request.getChannel(); this.nettyRequest = request.request(); this.orderedUpstreamMessageEvent = orderedUpstreamMessageEvent; + this.threadContext = threadContext; } @Override @@ -83,7 +86,6 @@ public final class NettyHttpChannel extends AbstractRestChannel { return new ReleasableBytesStreamOutput(transport.bigArrays); } - @Override public void sendResponse(RestResponse response) { // if the response object was created upstream, then use it; @@ -91,7 +93,7 @@ public final class NettyHttpChannel extends AbstractRestChannel { HttpResponse resp = newResponse(); resp.setStatus(getStatus(response.status())); - CorsHandler.setCorsResponseHeaders(nettyRequest, resp, transport.getCorsConfig()); + Netty3CorsHandler.setCorsResponseHeaders(nettyRequest, resp, transport.getCorsConfig()); String opaque = nettyRequest.headers().get("X-Opaque-Id"); if (opaque != null) { @@ -99,13 +101,14 @@ public final class NettyHttpChannel extends AbstractRestChannel { } // Add all custom headers - addCustomHeaders(response, resp); + addCustomHeaders(resp, response.getHeaders()); + addCustomHeaders(resp, threadContext.getResponseHeaders()); BytesReference content = response.content(); ChannelBuffer buffer; boolean addedReleaseListener = false; try { - buffer = NettyUtils.toChannelBuffer(content); + buffer = Netty3Utils.toChannelBuffer(content); resp.setContent(buffer); // If our response doesn't specify a content-type header, set one @@ -170,8 +173,7 @@ public final class NettyHttpChannel extends AbstractRestChannel { } } - private void addCustomHeaders(RestResponse response, HttpResponse resp) { - Map> customHeaders = response.getHeaders(); + private void addCustomHeaders(HttpResponse resp, Map> customHeaders) { if (customHeaders != null) { for (Map.Entry> headerEntry : customHeaders.entrySet()) { for (String headerValue : headerEntry.getValue()) { diff --git a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpRequest.java b/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/Netty3HttpRequest.java similarity index 93% rename from core/src/main/java/org/elasticsearch/http/netty/NettyHttpRequest.java rename to modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/Netty3HttpRequest.java index d62252bc0ce..39a5c94b239 100644 --- a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpRequest.java +++ b/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/Netty3HttpRequest.java @@ -17,11 +17,11 @@ * under the License. 
*/ -package org.elasticsearch.http.netty; +package org.elasticsearch.http.netty3; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.transport.netty.NettyUtils; +import org.elasticsearch.transport.netty3.Netty3Utils; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.support.RestUtils; import org.jboss.netty.channel.Channel; @@ -34,7 +34,7 @@ import java.util.Map; /** * */ -public class NettyHttpRequest extends RestRequest { +public class Netty3HttpRequest extends RestRequest { private final org.jboss.netty.handler.codec.http.HttpRequest request; private final Channel channel; @@ -42,12 +42,12 @@ public class NettyHttpRequest extends RestRequest { private final String rawPath; private final BytesReference content; - public NettyHttpRequest(org.jboss.netty.handler.codec.http.HttpRequest request, Channel channel) { + public Netty3HttpRequest(org.jboss.netty.handler.codec.http.HttpRequest request, Channel channel) { this.request = request; this.channel = channel; this.params = new HashMap<>(); if (request.getContent().readable()) { - this.content = NettyUtils.toBytesReference(request.getContent()); + this.content = Netty3Utils.toBytesReference(request.getContent()); } else { this.content = BytesArray.EMPTY; } diff --git a/core/src/main/java/org/elasticsearch/http/netty/HttpRequestHandler.java b/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/Netty3HttpRequestHandler.java similarity index 80% rename from core/src/main/java/org/elasticsearch/http/netty/HttpRequestHandler.java rename to modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/Netty3HttpRequestHandler.java index 376ca738fab..968eb6a24ab 100644 --- a/core/src/main/java/org/elasticsearch/http/netty/HttpRequestHandler.java +++ b/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/Netty3HttpRequestHandler.java @@ -17,10 +17,10 @@ * under the License. 
*/ -package org.elasticsearch.http.netty; +package org.elasticsearch.http.netty3; import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.http.netty.pipelining.OrderedUpstreamMessageEvent; +import org.elasticsearch.http.netty3.pipelining.OrderedUpstreamMessageEvent; import org.jboss.netty.channel.ChannelHandler; import org.jboss.netty.channel.ChannelHandlerContext; import org.jboss.netty.channel.ExceptionEvent; @@ -28,18 +28,15 @@ import org.jboss.netty.channel.MessageEvent; import org.jboss.netty.channel.SimpleChannelUpstreamHandler; import org.jboss.netty.handler.codec.http.HttpRequest; -/** - * - */ @ChannelHandler.Sharable -public class HttpRequestHandler extends SimpleChannelUpstreamHandler { +public class Netty3HttpRequestHandler extends SimpleChannelUpstreamHandler { - private final NettyHttpServerTransport serverTransport; + private final Netty3HttpServerTransport serverTransport; private final boolean httpPipeliningEnabled; private final boolean detailedErrorsEnabled; private final ThreadContext threadContext; - public HttpRequestHandler(NettyHttpServerTransport serverTransport, boolean detailedErrorsEnabled, ThreadContext threadContext) { + public Netty3HttpRequestHandler(Netty3HttpServerTransport serverTransport, boolean detailedErrorsEnabled, ThreadContext threadContext) { this.serverTransport = serverTransport; this.httpPipeliningEnabled = serverTransport.pipelining; this.detailedErrorsEnabled = detailedErrorsEnabled; @@ -60,8 +57,8 @@ public class HttpRequestHandler extends SimpleChannelUpstreamHandler { threadContext.copyHeaders(request.headers()); // the netty HTTP handling always copies the buffer over to its own buffer, either in NioWorker internally // when reading, or using a cumulation buffer - NettyHttpRequest httpRequest = new NettyHttpRequest(request, e.getChannel()); - NettyHttpChannel channel = new NettyHttpChannel(serverTransport, httpRequest, oue, detailedErrorsEnabled); + Netty3HttpRequest httpRequest = new Netty3HttpRequest(request, e.getChannel()); + Netty3HttpChannel channel = new Netty3HttpChannel(serverTransport, httpRequest, oue, detailedErrorsEnabled, threadContext); serverTransport.dispatchRequest(httpRequest, channel); super.messageReceived(ctx, e); } diff --git a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java b/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/Netty3HttpServerTransport.java similarity index 91% rename from core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java rename to modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/Netty3HttpServerTransport.java index 77c140ce7ce..c480155dceb 100644 --- a/core/src/main/java/org/elasticsearch/http/netty/NettyHttpServerTransport.java +++ b/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/Netty3HttpServerTransport.java @@ -17,15 +17,13 @@ * under the License.
*/ -package org.elasticsearch.http.netty; +package org.elasticsearch.http.netty3; import com.carrotsearch.hppc.IntHashSet; import com.carrotsearch.hppc.IntSet; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.transport.netty.NettyUtils; -import org.elasticsearch.transport.netty.OpenChannelsHandler; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Setting; @@ -46,16 +44,18 @@ import org.elasticsearch.http.HttpInfo; import org.elasticsearch.http.HttpServerAdapter; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.http.HttpStats; -import org.elasticsearch.http.netty.cors.CorsConfig; -import org.elasticsearch.http.netty.cors.CorsConfigBuilder; -import org.elasticsearch.http.netty.cors.CorsHandler; -import org.elasticsearch.http.netty.pipelining.HttpPipeliningHandler; +import org.elasticsearch.http.netty3.cors.Netty3CorsConfig; +import org.elasticsearch.http.netty3.cors.Netty3CorsConfigBuilder; +import org.elasticsearch.http.netty3.cors.Netty3CorsHandler; +import org.elasticsearch.http.netty3.pipelining.HttpPipeliningHandler; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.support.RestUtils; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.BindTransportException; +import org.elasticsearch.transport.netty3.Netty3OpenChannelsHandler; +import org.elasticsearch.transport.netty3.Netty3Utils; import org.jboss.netty.bootstrap.ServerBootstrap; import org.jboss.netty.channel.AdaptiveReceiveBufferSizePredictorFactory; import org.jboss.netty.channel.Channel; @@ -108,15 +108,12 @@ import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_PUBLISH_ import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_RESET_COOKIES; import static org.elasticsearch.http.HttpTransportSettings.SETTING_PIPELINING; import static org.elasticsearch.http.HttpTransportSettings.SETTING_PIPELINING_MAX_EVENTS; -import static org.elasticsearch.http.netty.cors.CorsHandler.ANY_ORIGIN; +import static org.elasticsearch.http.netty3.cors.Netty3CorsHandler.ANY_ORIGIN; -/** - * - */ -public class NettyHttpServerTransport extends AbstractLifecycleComponent implements HttpServerTransport { +public class Netty3HttpServerTransport extends AbstractLifecycleComponent implements HttpServerTransport { static { - NettyUtils.setup(); + Netty3Utils.setup(); } public static Setting SETTING_HTTP_NETTY_MAX_CUMULATION_BUFFER_CAPACITY = @@ -211,14 +208,14 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent serverChannels = new ArrayList<>(); // package private for testing - OpenChannelsHandler serverOpenChannels; + Netty3OpenChannelsHandler serverOpenChannels; protected volatile HttpServerAdapter httpServerAdapter; - private final CorsConfig corsConfig; + private final Netty3CorsConfig corsConfig; @Inject - public NettyHttpServerTransport(Settings settings, NetworkService networkService, BigArrays bigArrays, ThreadPool threadPool) { + public Netty3HttpServerTransport(Settings settings, NetworkService networkService, BigArrays bigArrays, ThreadPool threadPool) { super(settings); this.networkService = networkService; this.bigArrays = bigArrays; @@ -283,20 +280,18 @@ public class NettyHttpServerTransport 
extends AbstractLifecycleComponent> origins; private final Optional pattern; @@ -51,7 +51,7 @@ private final Map> preflightHeaders; private final boolean shortCircuit; - CorsConfig(final CorsConfigBuilder builder) { + Netty3CorsConfig(final Netty3CorsConfigBuilder builder) { origins = builder.origins.map(s -> new LinkedHashSet<>(s)); pattern = builder.pattern; anyOrigin = builder.anyOrigin; diff --git a/core/src/main/java/org/elasticsearch/http/netty/cors/CorsConfigBuilder.java b/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/cors/Netty3CorsConfigBuilder.java similarity index 75% rename from core/src/main/java/org/elasticsearch/http/netty/cors/CorsConfigBuilder.java rename to modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/cors/Netty3CorsConfigBuilder.java index 05aa1e6a852..947ec86b161 100644 --- a/core/src/main/java/org/elasticsearch/http/netty/cors/CorsConfigBuilder.java +++ b/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/cors/Netty3CorsConfigBuilder.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.http.netty.cors; +package org.elasticsearch.http.netty3.cors; import org.jboss.netty.handler.codec.http.HttpMethod; @@ -33,55 +33,55 @@ import java.util.concurrent.Callable; import java.util.regex.Pattern; /** - * Builder used to configure and build a {@link CorsConfig} instance. + * Builder used to configure and build a {@link Netty3CorsConfig} instance. * * This class was lifted from the Netty project: * https://github.com/netty/netty */ -public final class CorsConfigBuilder { +public final class Netty3CorsConfigBuilder { /** * Creates a Builder instance with its origin set to '*'. * * @return Builder to support method chaining. */ - public static CorsConfigBuilder forAnyOrigin() { - return new CorsConfigBuilder(); + public static Netty3CorsConfigBuilder forAnyOrigin() { + return new Netty3CorsConfigBuilder(); } /** - * Creates a {@link CorsConfigBuilder} instance with the specified origin. + * Creates a {@link Netty3CorsConfigBuilder} instance with the specified origin. * - * @return {@link CorsConfigBuilder} to support method chaining. + * @return {@link Netty3CorsConfigBuilder} to support method chaining. */ - public static CorsConfigBuilder forOrigin(final String origin) { + public static Netty3CorsConfigBuilder forOrigin(final String origin) { if ("*".equals(origin)) { - return new CorsConfigBuilder(); + return new Netty3CorsConfigBuilder(); } - return new CorsConfigBuilder(origin); + return new Netty3CorsConfigBuilder(origin); } /** - * Create a {@link CorsConfigBuilder} instance with the specified pattern origin. + * Create a {@link Netty3CorsConfigBuilder} instance with the specified pattern origin. * * @param pattern the regular expression pattern to match incoming origins on. - * @return {@link CorsConfigBuilder} with the configured origin pattern. + * @return {@link Netty3CorsConfigBuilder} with the configured origin pattern. */ - public static CorsConfigBuilder forPattern(final Pattern pattern) { + public static Netty3CorsConfigBuilder forPattern(final Pattern pattern) { if (pattern == null) { throw new IllegalArgumentException("CORS pattern cannot be null"); } - return new CorsConfigBuilder(pattern); + return new Netty3CorsConfigBuilder(pattern); } /** - * Creates a {@link CorsConfigBuilder} instance with the specified origins. + * Creates a {@link Netty3CorsConfigBuilder} instance with the specified origins.
* - * @return {@link CorsConfigBuilder} to support method chaining. + * @return {@link Netty3CorsConfigBuilder} to support method chaining. */ - public static CorsConfigBuilder forOrigins(final String... origins) { - return new CorsConfigBuilder(origins); + public static Netty3CorsConfigBuilder forOrigins(final String... origins) { + return new Netty3CorsConfigBuilder(origins); } Optional> origins; @@ -102,7 +102,7 @@ * * @param origins the origin to be used for this builder. */ - CorsConfigBuilder(final String... origins) { + Netty3CorsConfigBuilder(final String... origins) { this.origins = Optional.of(new LinkedHashSet<>(Arrays.asList(origins))); pattern = Optional.empty(); anyOrigin = false; @@ -113,7 +113,7 @@ * wildcard origin. * */ - CorsConfigBuilder() { + Netty3CorsConfigBuilder() { anyOrigin = true; origins = Optional.empty(); pattern = Optional.empty(); @@ -124,7 +124,7 @@ * * @param pattern the pattern to match against for incoming origins. */ - CorsConfigBuilder(final Pattern pattern) { + Netty3CorsConfigBuilder(final Pattern pattern) { this.pattern = Optional.of(pattern); origins = Optional.empty(); anyOrigin = false; @@ -135,9 +135,9 @@ * from the local file system. Calling this method will enable a successful CORS response * with a wildcard for the CORS response header 'Access-Control-Allow-Origin'. * - * @return {@link CorsConfigBuilder} to support method chaining. + * @return {@link Netty3CorsConfigBuilder} to support method chaining. */ - CorsConfigBuilder allowNullOrigin() { + Netty3CorsConfigBuilder allowNullOrigin() { allowNullOrigin = true; return this; } @@ -145,9 +145,9 @@ /** * Disables CORS support. * - * @return {@link CorsConfigBuilder} to support method chaining. + * @return {@link Netty3CorsConfigBuilder} to support method chaining. */ - public CorsConfigBuilder disable() { + public Netty3CorsConfigBuilder disable() { enabled = false; return this; } @@ -165,9 +165,9 @@ * The default value for 'withCredentials' is false in which case no cookies are sent. * Setting this to true will include cookies in cross origin requests. * - * @return {@link CorsConfigBuilder} to support method chaining. + * @return {@link Netty3CorsConfigBuilder} to support method chaining. */ - public CorsConfigBuilder allowCredentials() { + public Netty3CorsConfigBuilder allowCredentials() { allowCredentials = true; return this; } @@ -179,9 +179,9 @@ * request will be made. * * @param max the maximum time, in seconds, that the preflight response may be cached. - * @return {@link CorsConfigBuilder} to support method chaining. + * @return {@link Netty3CorsConfigBuilder} to support method chaining. */ - public CorsConfigBuilder maxAge(final long max) { + public Netty3CorsConfigBuilder maxAge(final long max) { maxAge = max; return this; } @@ -191,9 +191,9 @@ * CORS 'Access-Control-Request-Method' response header. * * @param methods the {@link HttpMethod}s that should be allowed. - * @return {@link CorsConfigBuilder} to support method chaining. + * @return {@link Netty3CorsConfigBuilder} to support method chaining. */ - public CorsConfigBuilder allowedRequestMethods(final HttpMethod... methods) { + public Netty3CorsConfigBuilder allowedRequestMethods(final HttpMethod...
methods) { requestMethods.addAll(Arrays.asList(methods)); return this; } @@ -212,9 +212,9 @@ * if it allows a request). * * @param headers the headers to be added to the preflight 'Access-Control-Allow-Headers' response header. - * @return {@link CorsConfigBuilder} to support method chaining. + * @return {@link Netty3CorsConfigBuilder} to support method chaining. */ - public CorsConfigBuilder allowedRequestHeaders(final String... headers) { + public Netty3CorsConfigBuilder allowedRequestHeaders(final String... headers) { requestHeaders.addAll(Arrays.asList(headers)); return this; } @@ -227,9 +227,9 @@ * * @param name the name of the HTTP header. * @param values the values for the HTTP header. - * @return {@link CorsConfigBuilder} to support method chaining. + * @return {@link Netty3CorsConfigBuilder} to support method chaining. */ - public CorsConfigBuilder preflightResponseHeader(final CharSequence name, final Object... values) { + public Netty3CorsConfigBuilder preflightResponseHeader(final CharSequence name, final Object... values) { if (values.length == 1) { preflightHeaders.put(name, new ConstantValueGenerator(values[0])); } else { @@ -247,9 +247,9 @@ * @param name the name of the HTTP header. * @param value the values for the HTTP header. * @param the type of values that the Iterable contains. - * @return {@link CorsConfigBuilder} to support method chaining. + * @return {@link Netty3CorsConfigBuilder} to support method chaining. */ - public CorsConfigBuilder preflightResponseHeader(final CharSequence name, final Iterable value) { + public Netty3CorsConfigBuilder preflightResponseHeader(final CharSequence name, final Iterable value) { preflightHeaders.put(name, new ConstantValueGenerator(value)); return this; } @@ -267,9 +267,9 @@ * @param name the name of the HTTP header. * @param valueGenerator a Callable which will be invoked at HTTP response creation. * @param the type of the value that the Callable can return. - * @return {@link CorsConfigBuilder} to support method chaining. + * @return {@link Netty3CorsConfigBuilder} to support method chaining. */ - public CorsConfigBuilder preflightResponseHeader(final CharSequence name, final Callable valueGenerator) { + public Netty3CorsConfigBuilder preflightResponseHeader(final CharSequence name, final Callable valueGenerator) { preflightHeaders.put(name, valueGenerator); return this; } @@ -277,9 +277,9 @@ /** * Specifies that no preflight response headers should be added to a preflight response. * - * @return {@link CorsConfigBuilder} to support method chaining. + * @return {@link Netty3CorsConfigBuilder} to support method chaining. */ - public CorsConfigBuilder noPreflightResponseHeaders() { + public Netty3CorsConfigBuilder noPreflightResponseHeaders() { noPreflightHeaders = true; return this; } @@ -292,24 +292,24 @@ * and this setting will check that the Origin is valid and if it is not valid no * further processing will take place, and an error will be returned to the calling client. * - * @return {@link CorsConfigBuilder} to support method chaining. + * @return {@link Netty3CorsConfigBuilder} to support method chaining.
*/ - public CorsConfigBuilder shortCircuit() { + public Netty3CorsConfigBuilder shortCircuit() { shortCircuit = true; return this; } /** - * Builds a {@link CorsConfig} with settings specified by previous method calls. + * Builds a {@link Netty3CorsConfig} with settings specified by previous method calls. * - * @return {@link CorsConfig} the configured CorsConfig instance. + * @return {@link Netty3CorsConfig} the configured CorsConfig instance. */ - public CorsConfig build() { + public Netty3CorsConfig build() { if (preflightHeaders.isEmpty() && !noPreflightHeaders) { preflightHeaders.put("date", DateValueGenerator.INSTANCE); preflightHeaders.put("content-length", new ConstantValueGenerator("0")); } - return new CorsConfig(this); + return new Netty3CorsConfig(this); } /** diff --git a/core/src/main/java/org/elasticsearch/http/netty/cors/CorsHandler.java b/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/cors/Netty3CorsHandler.java similarity index 95% rename from core/src/main/java/org/elasticsearch/http/netty/cors/CorsHandler.java rename to modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/cors/Netty3CorsHandler.java index 5d42de780b3..d051e0081c6 100644 --- a/core/src/main/java/org/elasticsearch/http/netty/cors/CorsHandler.java +++ b/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/cors/Netty3CorsHandler.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.http.netty.cors; +package org.elasticsearch.http.netty3.cors; import org.elasticsearch.common.Strings; import org.jboss.netty.channel.ChannelFutureListener; @@ -47,23 +47,23 @@ import static org.jboss.netty.handler.codec.http.HttpResponseStatus.OK; /** * Handles Cross Origin Resource Sharing (CORS) requests. *
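For readers following the rename: the builder's fluent API is unchanged apart from the class names. A minimal usage sketch; the origin, header names, and max-age value are illustrative, while every method call is one shown in the hunks above:

    import org.elasticsearch.http.netty3.cors.Netty3CorsConfig;
    import org.elasticsearch.http.netty3.cors.Netty3CorsConfigBuilder;
    import org.jboss.netty.handler.codec.http.HttpMethod;

    class CorsConfigExample {
        static Netty3CorsConfig restrictiveCors() {
            return Netty3CorsConfigBuilder
                .forOrigins("https://example.org")                       // illustrative origin
                .allowCredentials()                                      // emit Access-Control-Allow-Credentials
                .maxAge(1728000)                                         // preflight responses cached for up to 20 days
                .allowedRequestMethods(HttpMethod.GET, HttpMethod.POST)
                .allowedRequestHeaders("X-Requested-With", "Content-Type")
                .shortCircuit()                                          // answer invalid origins immediately
                .build();
        }
    }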

- * This handler can be configured using a {@link CorsConfig}, please + * This handler can be configured using a {@link Netty3CorsConfig}, please * refer to this class for details about the configuration options available. * * This code was borrowed from Netty 4 and refactored to work for Elasticsearch's Netty 3 setup. */ -public class CorsHandler extends SimpleChannelUpstreamHandler { +public class Netty3CorsHandler extends SimpleChannelUpstreamHandler { public static final String ANY_ORIGIN = "*"; private static Pattern SCHEME_PATTERN = Pattern.compile("^https?://"); - private final CorsConfig config; + private final Netty3CorsConfig config; private HttpRequest request; /** - * Creates a new instance with the specified {@link CorsConfig}. + * Creates a new instance with the specified {@link Netty3CorsConfig}. */ - public CorsHandler(final CorsConfig config) { + public Netty3CorsHandler(final Netty3CorsConfig config) { if (config == null) { throw new IllegalArgumentException("Config cannot be null"); } @@ -86,7 +86,7 @@ public class CorsHandler extends SimpleChannelUpstreamHandler { super.messageReceived(ctx, e); } - public static void setCorsResponseHeaders(HttpRequest request, HttpResponse resp, CorsConfig config) { + public static void setCorsResponseHeaders(HttpRequest request, HttpResponse resp, Netty3CorsConfig config) { if (!config.isCorsSupportEnabled()) { return; } diff --git a/core/src/main/java/org/elasticsearch/http/netty/pipelining/HttpPipeliningHandler.java b/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/pipelining/HttpPipeliningHandler.java similarity index 99% rename from core/src/main/java/org/elasticsearch/http/netty/pipelining/HttpPipeliningHandler.java rename to modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/pipelining/HttpPipeliningHandler.java index c291e591dc1..48886bb5a4d 100644 --- a/core/src/main/java/org/elasticsearch/http/netty/pipelining/HttpPipeliningHandler.java +++ b/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/pipelining/HttpPipeliningHandler.java @@ -1,4 +1,4 @@ -package org.elasticsearch.http.netty.pipelining; +package org.elasticsearch.http.netty3.pipelining; /* * Licensed to Elasticsearch under one or more contributor diff --git a/core/src/main/java/org/elasticsearch/http/netty/pipelining/OrderedDownstreamChannelEvent.java b/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/pipelining/OrderedDownstreamChannelEvent.java similarity index 98% rename from core/src/main/java/org/elasticsearch/http/netty/pipelining/OrderedDownstreamChannelEvent.java rename to modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/pipelining/OrderedDownstreamChannelEvent.java index 2485b7082bd..6322dcfde7b 100644 --- a/core/src/main/java/org/elasticsearch/http/netty/pipelining/OrderedDownstreamChannelEvent.java +++ b/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/pipelining/OrderedDownstreamChannelEvent.java @@ -1,4 +1,4 @@ -package org.elasticsearch.http.netty.pipelining; +package org.elasticsearch.http.netty3.pipelining; /* * Licensed to Elasticsearch under one or more contributor diff --git a/core/src/main/java/org/elasticsearch/http/netty/pipelining/OrderedUpstreamMessageEvent.java b/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/pipelining/OrderedUpstreamMessageEvent.java similarity index 96% rename from core/src/main/java/org/elasticsearch/http/netty/pipelining/OrderedUpstreamMessageEvent.java rename to 
modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/pipelining/OrderedUpstreamMessageEvent.java index cc47b5be320..9abb0bd4f1a 100644 --- a/core/src/main/java/org/elasticsearch/http/netty/pipelining/OrderedUpstreamMessageEvent.java +++ b/modules/transport-netty3/src/main/java/org/elasticsearch/http/netty3/pipelining/OrderedUpstreamMessageEvent.java @@ -1,4 +1,4 @@ -package org.elasticsearch.http.netty.pipelining; +package org.elasticsearch.http.netty3.pipelining; /* * Licensed to Elasticsearch under one or more contributor diff --git a/modules/transport-netty3/src/main/java/org/elasticsearch/transport/Netty3Plugin.java b/modules/transport-netty3/src/main/java/org/elasticsearch/transport/Netty3Plugin.java new file mode 100644 index 00000000000..0e8a6d10bcb --- /dev/null +++ b/modules/transport-netty3/src/main/java/org/elasticsearch/transport/Netty3Plugin.java @@ -0,0 +1,102 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.transport; + +import org.elasticsearch.SpecialPermission; +import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.http.netty3.Netty3HttpServerTransport; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.transport.netty3.Netty3Transport; + +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.util.Arrays; +import java.util.List; + +public class Netty3Plugin extends Plugin { + public static final String NETTY_TRANSPORT_NAME = "netty3"; + public static final String NETTY_HTTP_TRANSPORT_NAME = "netty3"; + + public Netty3Plugin(Settings settings) { + SecurityManager sm = System.getSecurityManager(); + if (sm != null) { + sm.checkPermission(new SpecialPermission()); + } + AccessController.doPrivileged((PrivilegedAction) () -> { + try { + Class.forName("org.jboss.netty.channel.socket.nio.SelectorUtil"); + } catch (ClassNotFoundException e) { + throw new AssertionError(e); // we don't do anything with this + } + return null; + }); + /* + * Asserts that sun.nio.ch.bugLevel has been set to a non-null value. This assertion will fail if the corresponding code + * is not executed in a doPrivileged block. This can be disabled via `netty.assert.buglevel` setting which isn't registered + * by default but test can do so if they depend on the jar instead of the module. + */ + //TODO Once we have no jar level dependency we can get rid of this. 
+ if (settings.getAsBoolean("netty.assert.buglevel", true)) { + assert System.getProperty("sun.nio.ch.bugLevel") != null : + "sun.nio.ch.bugLevel is null; did somebody pull in SelectorUtil without wrapping the call in a doPrivileged block?"; + } + } + + @Override + public List<Setting<?>> getSettings() { + return Arrays.asList( + Netty3HttpServerTransport.SETTING_HTTP_NETTY_MAX_CUMULATION_BUFFER_CAPACITY, + Netty3HttpServerTransport.SETTING_HTTP_NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS, + Netty3HttpServerTransport.SETTING_HTTP_WORKER_COUNT, + Netty3HttpServerTransport.SETTING_HTTP_TCP_NO_DELAY, + Netty3HttpServerTransport.SETTING_HTTP_TCP_KEEP_ALIVE, + Netty3HttpServerTransport.SETTING_HTTP_TCP_BLOCKING_SERVER, + Netty3HttpServerTransport.SETTING_HTTP_TCP_REUSE_ADDRESS, + Netty3HttpServerTransport.SETTING_HTTP_TCP_SEND_BUFFER_SIZE, + Netty3HttpServerTransport.SETTING_HTTP_TCP_RECEIVE_BUFFER_SIZE, + Netty3Transport.WORKER_COUNT, + Netty3Transport.NETTY_MAX_CUMULATION_BUFFER_CAPACITY, + Netty3Transport.NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS, + Netty3Transport.NETTY_RECEIVE_PREDICTOR_SIZE, + Netty3Transport.NETTY_RECEIVE_PREDICTOR_MIN, + Netty3Transport.NETTY_RECEIVE_PREDICTOR_MAX, + Netty3Transport.NETTY_BOSS_COUNT + ); + } + + @Override + public Settings additionalSettings() { + return Settings.builder() + // here we set the netty3 transport and http transport as the defaults. This is a set-once setting, + // i.e. if another plugin does the same the server will fail; only one default network implementation can exist! + .put(NetworkModule.HTTP_DEFAULT_TYPE_SETTING.getKey(), NETTY_HTTP_TRANSPORT_NAME) + .put(NetworkModule.TRANSPORT_DEFAULT_TYPE_SETTING.getKey(), NETTY_TRANSPORT_NAME) + .build(); + } + + public void onModule(NetworkModule networkModule) { + if (networkModule.canRegisterHttpExtensions()) { + networkModule.registerHttpTransport(NETTY_HTTP_TRANSPORT_NAME, Netty3HttpServerTransport.class); + } + networkModule.registerTransport(NETTY_TRANSPORT_NAME, Netty3Transport.class); + } +} diff --git a/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/ChannelBufferBytesReference.java b/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/ChannelBufferBytesReference.java new file mode 100644 index 00000000000..ef4c0fb1933 --- /dev/null +++ b/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/ChannelBufferBytesReference.java @@ -0,0 +1,91 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.
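Because additionalSettings() above only establishes defaults, a node or test can still select the transports explicitly. A small sketch using the NetworkModule keys that the test base class later in this patch relies on (the helper method name is made up):

    import org.elasticsearch.common.network.NetworkModule;
    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.transport.Netty3Plugin;

    class TransportSelectionExample {
        static Settings netty3NodeSettings() {
            return Settings.builder()
                .put(NetworkModule.TRANSPORT_TYPE_KEY, Netty3Plugin.NETTY_TRANSPORT_NAME)  // "netty3"
                .put(NetworkModule.HTTP_TYPE_KEY, Netty3Plugin.NETTY_HTTP_TRANSPORT_NAME)  // "netty3"
                .build();
        }
    }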
+ */ +package org.elasticsearch.transport.netty3; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.jboss.netty.buffer.ChannelBuffer; + +import java.io.IOException; +import java.io.OutputStream; +import java.nio.charset.StandardCharsets; + +final class ChannelBufferBytesReference extends BytesReference { + + private final ChannelBuffer buffer; + private final int length; + private final int offset; + + ChannelBufferBytesReference(ChannelBuffer buffer, int length) { + this.buffer = buffer; + this.length = length; + this.offset = buffer.readerIndex(); + assert length <= buffer.readableBytes() : "length[" + length +"] > " + buffer.readableBytes(); + } + + @Override + public byte get(int index) { + return buffer.getByte(offset + index); + } + + @Override + public int length() { + return length; + } + + @Override + public BytesReference slice(int from, int length) { + return new ChannelBufferBytesReference(buffer.slice(offset + from, length), length); + } + + @Override + public StreamInput streamInput() { + return new ChannelBufferStreamInput(buffer.duplicate(), length); + } + + @Override + public void writeTo(OutputStream os) throws IOException { + buffer.getBytes(offset, os, length); + } + + ChannelBuffer toChannelBuffer() { + return buffer.duplicate(); + } + + @Override + public String utf8ToString() { + return buffer.toString(offset, length, StandardCharsets.UTF_8); + } + + @Override + public BytesRef toBytesRef() { + if (buffer.hasArray()) { + return new BytesRef(buffer.array(), buffer.arrayOffset() + offset, length); + } + final byte[] copy = new byte[length]; + buffer.getBytes(offset, copy); + return new BytesRef(copy); + } + + @Override + public long ramBytesUsed() { + return buffer.capacity(); + } +} diff --git a/core/src/main/java/org/elasticsearch/transport/netty/ChannelBufferStreamInput.java b/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/ChannelBufferStreamInput.java similarity index 96% rename from core/src/main/java/org/elasticsearch/transport/netty/ChannelBufferStreamInput.java rename to modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/ChannelBufferStreamInput.java index 3b95ddd74c7..299e4216b4d 100644 --- a/core/src/main/java/org/elasticsearch/transport/netty/ChannelBufferStreamInput.java +++ b/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/ChannelBufferStreamInput.java @@ -17,7 +17,7 @@ * under the License. 
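The new ChannelBufferBytesReference is a zero-copy view over a Netty 3 ChannelBuffer; its constructor is package-private, so callers go through the Netty3Utils.toBytesReference helper that appears further down in this patch (assumed public here, as its cross-class uses suggest). A sketch of the intended behavior, with illustrative buffer contents:

    import org.elasticsearch.common.bytes.BytesReference;
    import org.elasticsearch.transport.netty3.Netty3Utils;
    import org.jboss.netty.buffer.ChannelBuffer;
    import org.jboss.netty.buffer.ChannelBuffers;
    import java.nio.charset.StandardCharsets;

    class BytesReferenceExample {
        static void demo() {
            ChannelBuffer buffer = ChannelBuffers.copiedBuffer("hello world", StandardCharsets.UTF_8);
            BytesReference ref = Netty3Utils.toBytesReference(buffer); // wraps the buffer, no copy
            assert ref.length() == buffer.readableBytes();
            assert "hello".equals(ref.slice(0, 5).utf8ToString());     // slices share the same buffer
        }
    }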
*/ -package org.elasticsearch.transport.netty; +package org.elasticsearch.transport.netty3; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.bytes.BytesReference; @@ -47,7 +47,7 @@ class ChannelBufferStreamInput extends StreamInput { @Override public BytesReference readBytesReference(int length) throws IOException { - BytesReference ref = NettyUtils.toBytesReference(buffer.slice(buffer.readerIndex(), length)); + BytesReference ref = Netty3Utils.toBytesReference(buffer.slice(buffer.readerIndex(), length)); buffer.skipBytes(length); return ref; } diff --git a/core/src/main/java/org/elasticsearch/transport/netty/NettyInternalESLogger.java b/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3InternalESLogger.java similarity index 93% rename from core/src/main/java/org/elasticsearch/transport/netty/NettyInternalESLogger.java rename to modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3InternalESLogger.java index d8307f32244..6ff941c48e7 100644 --- a/core/src/main/java/org/elasticsearch/transport/netty/NettyInternalESLogger.java +++ b/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3InternalESLogger.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.transport.netty; +package org.elasticsearch.transport.netty3; import org.elasticsearch.common.SuppressLoggerChecks; import org.elasticsearch.common.logging.ESLogger; @@ -27,11 +27,11 @@ import org.jboss.netty.logging.AbstractInternalLogger; * */ @SuppressLoggerChecks(reason = "safely delegates to logger") -final class NettyInternalESLogger extends AbstractInternalLogger { +final class Netty3InternalESLogger extends AbstractInternalLogger { private final ESLogger logger; - NettyInternalESLogger(ESLogger logger) { + Netty3InternalESLogger(ESLogger logger) { this.logger = logger; } diff --git a/core/src/main/java/org/elasticsearch/transport/netty/NettyMessageChannelHandler.java b/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3MessageChannelHandler.java similarity index 88% rename from core/src/main/java/org/elasticsearch/transport/netty/NettyMessageChannelHandler.java rename to modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3MessageChannelHandler.java index 0f2805459c9..ff4dc27e2c8 100644 --- a/core/src/main/java/org/elasticsearch/transport/netty/NettyMessageChannelHandler.java +++ b/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3MessageChannelHandler.java @@ -17,12 +17,10 @@ * under the License. */ -package org.elasticsearch.transport.netty; +package org.elasticsearch.transport.netty3; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.transport.TcpHeader; -import org.elasticsearch.transport.TcpTransport; -import org.elasticsearch.transport.TcpTransportChannel; import org.elasticsearch.transport.TransportServiceAdapter; import org.elasticsearch.transport.Transports; import org.jboss.netty.buffer.ChannelBuffer; @@ -38,13 +36,13 @@ import java.net.InetSocketAddress; * A handler (must be the last one!) that does size based frame decoding and forwards the actual message * to the relevant action. 
*/ -class NettyMessageChannelHandler extends SimpleChannelUpstreamHandler { +class Netty3MessageChannelHandler extends SimpleChannelUpstreamHandler { protected final TransportServiceAdapter transportServiceAdapter; - protected final NettyTransport transport; + protected final Netty3Transport transport; protected final String profileName; - NettyMessageChannelHandler(NettyTransport transport, String profileName) { + Netty3MessageChannelHandler(Netty3Transport transport, String profileName) { this.transportServiceAdapter = transport.transportServiceAdapter(); this.transport = transport; this.profileName = profileName; @@ -71,7 +69,7 @@ class NettyMessageChannelHandler extends SimpleChannelUpstreamHandler { try { // netty always copies a buffer, either in NioWorker in its read handler, where it copies to a fresh // buffer, or in the cumulation buffer, which is cleaned each time so it could be bigger than the actual size - BytesReference reference = NettyUtils.toBytesReference(buffer, remainingMessageSize); + BytesReference reference = Netty3Utils.toBytesReference(buffer, remainingMessageSize); transport.messageReceived(reference, ctx.getChannel(), profileName, remoteAddress, remainingMessageSize); } finally { // Set the expected position of the buffer, no matter what happened diff --git a/core/src/main/java/org/elasticsearch/transport/netty/OpenChannelsHandler.java b/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3OpenChannelsHandler.java similarity index 94% rename from core/src/main/java/org/elasticsearch/transport/netty/OpenChannelsHandler.java rename to modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3OpenChannelsHandler.java index df7cd73a42b..6a7732723c4 100644 --- a/core/src/main/java/org/elasticsearch/transport/netty/OpenChannelsHandler.java +++ b/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3OpenChannelsHandler.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.transport.netty; +package org.elasticsearch.transport.netty3; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.logging.ESLogger; @@ -33,14 +33,10 @@ import org.jboss.netty.channel.ChannelState; import org.jboss.netty.channel.ChannelStateEvent; import org.jboss.netty.channel.ChannelUpstreamHandler; -import java.io.Closeable; import java.util.Set; -/** - * - */ @ChannelHandler.Sharable -public class OpenChannelsHandler implements ChannelUpstreamHandler, Releasable { +public class Netty3OpenChannelsHandler implements ChannelUpstreamHandler, Releasable { final Set openChannels = ConcurrentCollections.newConcurrentSet(); final CounterMetric openChannelsMetric = new CounterMetric(); @@ -48,7 +44,7 @@ public class OpenChannelsHandler implements ChannelUpstreamHandler, Releasable { final ESLogger logger; - public OpenChannelsHandler(ESLogger logger) { + public Netty3OpenChannelsHandler(ESLogger logger) { this.logger = logger; } diff --git a/core/src/main/java/org/elasticsearch/transport/netty/SizeHeaderFrameDecoder.java b/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3SizeHeaderFrameDecoder.java similarity index 91% rename from core/src/main/java/org/elasticsearch/transport/netty/SizeHeaderFrameDecoder.java rename to modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3SizeHeaderFrameDecoder.java index d098fae6c78..487613943e4 100644 --- a/core/src/main/java/org/elasticsearch/transport/netty/SizeHeaderFrameDecoder.java +++ b/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3SizeHeaderFrameDecoder.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.transport.netty; +package org.elasticsearch.transport.netty3; import org.elasticsearch.transport.TcpHeader; import org.elasticsearch.transport.TcpTransport; @@ -27,14 +27,12 @@ import org.jboss.netty.channel.ChannelHandlerContext; import org.jboss.netty.handler.codec.frame.FrameDecoder; import org.jboss.netty.handler.codec.frame.TooLongFrameException; -/** - */ -final class SizeHeaderFrameDecoder extends FrameDecoder { +final class Netty3SizeHeaderFrameDecoder extends FrameDecoder { @Override protected Object decode(ChannelHandlerContext ctx, Channel channel, ChannelBuffer buffer) throws Exception { try { - boolean continueProcessing = TcpTransport.validateMessageHeader(NettyUtils.toBytesReference(buffer)); + boolean continueProcessing = TcpTransport.validateMessageHeader(Netty3Utils.toBytesReference(buffer)); buffer.skipBytes(TcpHeader.MARKER_BYTES_SIZE + TcpHeader.MESSAGE_LENGTH_SIZE); return continueProcessing ? buffer : null; } catch (IllegalArgumentException ex) { diff --git a/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java b/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3Transport.java similarity index 70% rename from core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java rename to modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3Transport.java index 3c75f68eeb0..5ca2252cf2c 100644 --- a/core/src/main/java/org/elasticsearch/transport/netty/NettyTransport.java +++ b/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3Transport.java @@ -17,17 +17,14 @@ * under the License. 
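As a reading aid for the decoder above: it validates a fixed-size header and then strips it before handing the buffer on. A sketch of the frame layout it expects, assuming the two TcpHeader marker bytes are 'E' and 'S' followed by a 4-byte message length (which TcpTransport.validateMessageHeader checks):

    import org.jboss.netty.buffer.ChannelBuffer;
    import org.jboss.netty.buffer.ChannelBuffers;

    class FrameLayoutExample {
        static ChannelBuffer frame(ChannelBuffer payload) {
            ChannelBuffer frame = ChannelBuffers.dynamicBuffer();
            frame.writeByte('E');                      // TcpHeader marker byte 1
            frame.writeByte('S');                      // TcpHeader marker byte 2
            frame.writeInt(payload.readableBytes());   // message length
            frame.writeBytes(payload);                 // the actual message
            // Netty3SizeHeaderFrameDecoder.decode() validates this header, then skips
            // MARKER_BYTES_SIZE + MESSAGE_LENGTH_SIZE bytes and forwards the remainder.
            return frame;
        }
    }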
*/ -package org.elasticsearch.transport.netty; +package org.elasticsearch.transport.netty3; -import org.elasticsearch.Version; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.bytes.ReleasablePagedBytesReference; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.ReleasableBytesStream; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.network.NetworkService.TcpSettings; @@ -42,17 +39,12 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.BytesTransportRequest; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.TcpTransport; -import org.elasticsearch.transport.TransportMessage; -import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportServiceAdapter; import org.elasticsearch.transport.TransportSettings; import org.jboss.netty.bootstrap.ClientBootstrap; import org.jboss.netty.bootstrap.ServerBootstrap; -import org.jboss.netty.buffer.ChannelBuffer; -import org.jboss.netty.buffer.ChannelBuffers; import org.jboss.netty.channel.AdaptiveReceiveBufferSizePredictorFactory; import org.jboss.netty.channel.Channel; import org.jboss.netty.channel.ChannelFuture; @@ -74,8 +66,8 @@ import org.jboss.netty.util.HashedWheelTimer; import java.io.IOException; import java.net.InetSocketAddress; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.concurrent.Executors; @@ -92,10 +84,10 @@ import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadF * longer. Med is for the typical search / single doc index. And High for things like cluster state. Ping is reserved for * sending out ping requests to other nodes. 
*/ -public class NettyTransport extends TcpTransport { +public class Netty3Transport extends TcpTransport { static { - NettyUtils.setup(); + Netty3Utils.setup(); } public static final Setting WORKER_COUNT = @@ -135,14 +127,14 @@ public class NettyTransport extends TcpTransport { protected final ByteSizeValue receivePredictorMin; protected final ByteSizeValue receivePredictorMax; // package private for testing - volatile OpenChannelsHandler serverOpenChannels; + volatile Netty3OpenChannelsHandler serverOpenChannels; protected volatile ClientBootstrap clientBootstrap; protected final Map serverBootstraps = newConcurrentMap(); @Inject - public NettyTransport(Settings settings, ThreadPool threadPool, NetworkService networkService, BigArrays bigArrays, - NamedWriteableRegistry namedWriteableRegistry, CircuitBreakerService circuitBreakerService) { - super("netty", settings, threadPool, bigArrays, circuitBreakerService, namedWriteableRegistry, networkService); + public Netty3Transport(Settings settings, ThreadPool threadPool, NetworkService networkService, BigArrays bigArrays, + NamedWriteableRegistry namedWriteableRegistry, CircuitBreakerService circuitBreakerService) { + super("netty3", settings, threadPool, bigArrays, circuitBreakerService, namedWriteableRegistry, networkService); this.workerCount = WORKER_COUNT.get(settings); this.maxCumulationBufferCapacity = NETTY_MAX_CUMULATION_BUFFER_CAPACITY.get(settings); this.maxCompositeBufferComponents = NETTY_MAX_COMPOSITE_BUFFER_COMPONENTS.get(settings); @@ -168,7 +160,7 @@ public class NettyTransport extends TcpTransport { try { clientBootstrap = createClientBootstrap(); if (NetworkService.NETWORK_SERVER.get(settings)) { - final OpenChannelsHandler openChannels = new OpenChannelsHandler(logger); + final Netty3OpenChannelsHandler openChannels = new Netty3OpenChannelsHandler(logger); this.serverOpenChannels = openChannels; // loop through all profiles and start them up, special handling for default one for (Map.Entry entry : buildProfileSettings().entrySet()) { @@ -190,6 +182,7 @@ public class NettyTransport extends TcpTransport { } private ClientBootstrap createClientBootstrap() { + // this doPrivileged is for SelectorUtil.java that tries to set "sun.nio.ch.bugLevel" if (blockingClient) { clientBootstrap = new ClientBootstrap(new OioClientSocketChannelFactory( Executors.newCachedThreadPool(daemonThreadFactory(settings, TRANSPORT_CLIENT_WORKER_THREAD_NAME_PREFIX)))); @@ -288,7 +281,7 @@ public class NettyTransport extends TcpTransport { final ThreadFactory bossFactory = daemonThreadFactory(this.settings, HTTP_SERVER_BOSS_THREAD_NAME_PREFIX, name); final ThreadFactory workerFactory = daemonThreadFactory(this.settings, HTTP_SERVER_WORKER_THREAD_NAME_PREFIX, name); - ServerBootstrap serverBootstrap; + final ServerBootstrap serverBootstrap; if (blockingServer) { serverBootstrap = new ServerBootstrap(new OioServerSocketChannelFactory( Executors.newCachedThreadPool(bossFactory), @@ -321,12 +314,15 @@ public class NettyTransport extends TcpTransport { } protected final void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) throws Exception { - onException(ctx.getChannel(), e.getCause()); + onException( + ctx.getChannel(), + e.getCause() == null || e.getCause() instanceof Exception ? + (Exception)e.getCause() : new ElasticsearchException(e.getCause())); } @Override public long serverOpen() { - OpenChannelsHandler channels = serverOpenChannels; + Netty3OpenChannelsHandler channels = serverOpenChannels; return channels == null ? 
0 : channels.numberOfOpenChannels(); } @@ -342,81 +338,33 @@ public class NettyTransport extends TcpTransport { channels[0].getCloseFuture().addListener(new ChannelCloseListener(node)); return new NodeChannels(channels, channels, channels, channels, channels); } - - protected NodeChannels connectToChannels(DiscoveryNode node) { + protected NodeChannels connectToChannels(DiscoveryNode node) throws IOException { final NodeChannels nodeChannels = new NodeChannels(new Channel[connectionsPerNodeRecovery], new Channel[connectionsPerNodeBulk], new Channel[connectionsPerNodeReg], new Channel[connectionsPerNodeState], new Channel[connectionsPerNodePing]); boolean success = false; try { - ChannelFuture[] connectRecovery = new ChannelFuture[nodeChannels.recovery.length]; - ChannelFuture[] connectBulk = new ChannelFuture[nodeChannels.bulk.length]; - ChannelFuture[] connectReg = new ChannelFuture[nodeChannels.reg.length]; - ChannelFuture[] connectState = new ChannelFuture[nodeChannels.state.length]; - ChannelFuture[] connectPing = new ChannelFuture[nodeChannels.ping.length]; + int numConnections = connectionsPerNodeBulk + connectionsPerNodePing + connectionsPerNodeRecovery + connectionsPerNodeReg + + connectionsPerNodeState; + ArrayList connections = new ArrayList<>(); InetSocketAddress address = ((InetSocketTransportAddress) node.getAddress()).address(); - for (int i = 0; i < connectRecovery.length; i++) { - connectRecovery[i] = clientBootstrap.connect(address); + for (int i = 0; i < numConnections; i++) { + connections.add(clientBootstrap.connect(address)); } - for (int i = 0; i < connectBulk.length; i++) { - connectBulk[i] = clientBootstrap.connect(address); - } - for (int i = 0; i < connectReg.length; i++) { - connectReg[i] = clientBootstrap.connect(address); - } - for (int i = 0; i < connectState.length; i++) { - connectState[i] = clientBootstrap.connect(address); - } - for (int i = 0; i < connectPing.length; i++) { - connectPing[i] = clientBootstrap.connect(address); - } - + final Iterator iterator = connections.iterator(); try { - for (int i = 0; i < connectRecovery.length; i++) { - connectRecovery[i].awaitUninterruptibly((long) (connectTimeout.millis() * 1.5)); - if (!connectRecovery[i].isSuccess()) { - throw new ConnectTransportException(node, "connect_timeout[" + connectTimeout + "]", connectRecovery[i].getCause()); + for (Channel[] channels : nodeChannels.getChannelArrays()) { + for (int i = 0; i < channels.length; i++) { + assert iterator.hasNext(); + ChannelFuture future = iterator.next(); + future.awaitUninterruptibly((long) (connectTimeout.millis() * 1.5)); + if (!future.isSuccess()) { + throw new ConnectTransportException(node, "connect_timeout[" + connectTimeout + "]", future.getCause()); + } + channels[i] = future.getChannel(); + channels[i].getCloseFuture().addListener(new ChannelCloseListener(node)); } - nodeChannels.recovery[i] = connectRecovery[i].getChannel(); - nodeChannels.recovery[i].getCloseFuture().addListener(new ChannelCloseListener(node)); } - - for (int i = 0; i < connectBulk.length; i++) { - connectBulk[i].awaitUninterruptibly((long) (connectTimeout.millis() * 1.5)); - if (!connectBulk[i].isSuccess()) { - throw new ConnectTransportException(node, "connect_timeout[" + connectTimeout + "]", connectBulk[i].getCause()); - } - nodeChannels.bulk[i] = connectBulk[i].getChannel(); - nodeChannels.bulk[i].getCloseFuture().addListener(new ChannelCloseListener(node)); - } - - for (int i = 0; i < connectReg.length; i++) { - connectReg[i].awaitUninterruptibly((long) 
(connectTimeout.millis() * 1.5)); - if (!connectReg[i].isSuccess()) { - throw new ConnectTransportException(node, "connect_timeout[" + connectTimeout + "]", connectReg[i].getCause()); - } - nodeChannels.reg[i] = connectReg[i].getChannel(); - nodeChannels.reg[i].getCloseFuture().addListener(new ChannelCloseListener(node)); - } - - for (int i = 0; i < connectState.length; i++) { - connectState[i].awaitUninterruptibly((long) (connectTimeout.millis() * 1.5)); - if (!connectState[i].isSuccess()) { - throw new ConnectTransportException(node, "connect_timeout[" + connectTimeout + "]", connectState[i].getCause()); - } - nodeChannels.state[i] = connectState[i].getChannel(); - nodeChannels.state[i].getCloseFuture().addListener(new ChannelCloseListener(node)); - } - - for (int i = 0; i < connectPing.length; i++) { - connectPing[i].awaitUninterruptibly((long) (connectTimeout.millis() * 1.5)); - if (!connectPing[i].isSuccess()) { - throw new ConnectTransportException(node, "connect_timeout[" + connectTimeout + "]", connectPing[i].getCause()); - } - nodeChannels.ping[i] = connectPing[i].getChannel(); - nodeChannels.ping[i].getCloseFuture().addListener(new ChannelCloseListener(node)); - } - if (nodeChannels.recovery.length == 0) { if (nodeChannels.bulk.length > 0) { nodeChannels.recovery = nodeChannels.bulk; @@ -428,14 +376,7 @@ public class NettyTransport extends TcpTransport { nodeChannels.bulk = nodeChannels.reg; } } catch (RuntimeException e) { - // clean the futures - List futures = new ArrayList<>(); - futures.addAll(Arrays.asList(connectRecovery)); - futures.addAll(Arrays.asList(connectBulk)); - futures.addAll(Arrays.asList(connectReg)); - futures.addAll(Arrays.asList(connectState)); - futures.addAll(Arrays.asList(connectPing)); - for (ChannelFuture future : Collections.unmodifiableList(futures)) { + for (ChannelFuture future : Collections.unmodifiableList(connections)) { future.cancel(); if (future.getChannel() != null && future.getChannel().isOpen()) { try { @@ -461,16 +402,16 @@ public class NettyTransport extends TcpTransport { } protected static class ClientChannelPipelineFactory implements ChannelPipelineFactory { - protected final NettyTransport nettyTransport; + protected final Netty3Transport nettyTransport; - public ClientChannelPipelineFactory(NettyTransport nettyTransport) { + public ClientChannelPipelineFactory(Netty3Transport nettyTransport) { this.nettyTransport = nettyTransport; } @Override public ChannelPipeline getPipeline() throws Exception { ChannelPipeline channelPipeline = Channels.pipeline(); - SizeHeaderFrameDecoder sizeHeader = new SizeHeaderFrameDecoder(); + Netty3SizeHeaderFrameDecoder sizeHeader = new Netty3SizeHeaderFrameDecoder(); if (nettyTransport.maxCumulationBufferCapacity.bytes() >= 0) { if (nettyTransport.maxCumulationBufferCapacity.bytes() > Integer.MAX_VALUE) { sizeHeader.setMaxCumulationBufferCapacity(Integer.MAX_VALUE); @@ -483,7 +424,7 @@ public class NettyTransport extends TcpTransport { } channelPipeline.addLast("size", sizeHeader); // using a dot as a prefix means, this cannot come from any settings parsed - channelPipeline.addLast("dispatcher", new NettyMessageChannelHandler(nettyTransport, ".client")); + channelPipeline.addLast("dispatcher", new Netty3MessageChannelHandler(nettyTransport, ".client")); return channelPipeline; } } @@ -494,11 +435,11 @@ public class NettyTransport extends TcpTransport { protected static class ServerChannelPipelineFactory implements ChannelPipelineFactory { - protected final NettyTransport nettyTransport; + protected final 
Netty3Transport nettyTransport; protected final String name; protected final Settings settings; - public ServerChannelPipelineFactory(NettyTransport nettyTransport, String name, Settings settings) { + public ServerChannelPipelineFactory(Netty3Transport nettyTransport, String name, Settings settings) { this.nettyTransport = nettyTransport; this.name = name; this.settings = settings; @@ -508,7 +449,7 @@ public class NettyTransport extends TcpTransport { public ChannelPipeline getPipeline() throws Exception { ChannelPipeline channelPipeline = Channels.pipeline(); channelPipeline.addLast("openChannels", nettyTransport.serverOpenChannels); - SizeHeaderFrameDecoder sizeHeader = new SizeHeaderFrameDecoder(); + Netty3SizeHeaderFrameDecoder sizeHeader = new Netty3SizeHeaderFrameDecoder(); if (nettyTransport.maxCumulationBufferCapacity.bytes() > 0) { if (nettyTransport.maxCumulationBufferCapacity.bytes() > Integer.MAX_VALUE) { sizeHeader.setMaxCumulationBufferCapacity(Integer.MAX_VALUE); @@ -520,7 +461,7 @@ public class NettyTransport extends TcpTransport { sizeHeader.setMaxCumulationBufferComponents(nettyTransport.maxCompositeBufferComponents); } channelPipeline.addLast("size", sizeHeader); - channelPipeline.addLast("dispatcher", new NettyMessageChannelHandler(nettyTransport, name)); + channelPipeline.addLast("dispatcher", new Netty3MessageChannelHandler(nettyTransport, name)); return channelPipeline; } } @@ -537,13 +478,16 @@ public class NettyTransport extends TcpTransport { public void operationComplete(final ChannelFuture future) throws Exception { NodeChannels nodeChannels = connectedNodes.get(node); if (nodeChannels != null && nodeChannels.hasChannel(future.getChannel())) { - threadPool.generic().execute(() -> disconnectFromNode(node, future.getChannel(), "channel closed event")); + threadPool.generic().execute(() -> { + disconnectFromNode(node, future.getChannel(), "channel closed event"); + }); } } } + @Override protected void sendMessage(Channel channel, BytesReference reference, Runnable sendListener, boolean close) { - final ChannelFuture future = channel.write(NettyUtils.toChannelBuffer(reference)); + final ChannelFuture future = channel.write(Netty3Utils.toChannelBuffer(reference)); if (close) { future.addListener(f -> { try { @@ -602,8 +546,8 @@ public class NettyTransport extends TcpTransport { ServerBootstrap serverBootstrap = entry.getValue(); try { serverBootstrap.releaseExternalResources(); - } catch (Throwable t) { - logger.debug("Error closing serverBootstrap for profile [{}]", t, name); + } catch (Exception e) { + logger.debug("Error closing serverBootstrap for profile [{}]", e, name); } } serverBootstraps.clear(); @@ -613,93 +557,4 @@ public class NettyTransport extends TcpTransport { } }); } - - @Override - public Message prepareSend(Version nodeVersion, TransportMessage message, StreamOutput stream, - ReleasableBytesStream writtenBytes) throws IOException { - // it might be nice to somehow generalize this optimization, maybe a smart "paged" bytes output - // that create paged channel buffers, but its tricky to know when to do it (where this option is - // more explicit). 
- if (message instanceof BytesTransportRequest) { - BytesTransportRequest bRequest = (BytesTransportRequest) message; - assert nodeVersion.equals(bRequest.version()); - bRequest.writeThin(stream); - stream.close(); - ReleasablePagedBytesReference bytes = writtenBytes.bytes(); - ChannelBuffer headerBuffer = NettyUtils.toChannelBuffer(bytes); - ChannelBuffer contentBuffer = NettyUtils.toChannelBuffer(bRequest.bytes()); - ChannelBuffer buffer = ChannelBuffers.wrappedBuffer(NettyUtils.DEFAULT_GATHERING, headerBuffer, contentBuffer); - return new NettyMessage(buffer); - } else { - return super.prepareSend(nodeVersion, message, stream, writtenBytes); - } - } - - @Override - public Message prepareSend(Version nodeVersion, BytesReference bytesReference) { - return new NettyMessage(NettyUtils.toChannelBuffer(bytesReference)); - } - - @Override - public boolean canCompress(TransportRequest request) { - return super.canCompress(request) && (!(request instanceof BytesTransportRequest)); - } - - private class NettyMessage implements Message { - private final ChannelBuffer buffer; - - public NettyMessage(ChannelBuffer buffer) { - this.buffer = buffer; - } - - public StreamOutput getHeaderOutput() { - return new ChannelBufferStreamOutput(buffer); - } - - public int size() { - return buffer.readableBytes(); - } - - @Override - public void send(Channel channel, Runnable onRequestSent) { - ChannelFuture future = channel.write(buffer); - ChannelFutureListener channelFutureListener = f -> onRequestSent.run(); - future.addListener(channelFutureListener); - } - } - - private final static class ChannelBufferStreamOutput extends StreamOutput { - - private final ChannelBuffer buffer; - private int offset; - - public ChannelBufferStreamOutput(ChannelBuffer buffer) { - this.buffer = buffer; - this.offset = buffer.readerIndex(); - } - - @Override - public void writeByte(byte b) throws IOException { - buffer.setByte(offset++, b); - } - - @Override - public void writeBytes(byte[] b, int offset, int length) throws IOException { - buffer.setBytes(this.offset, b, offset, length); - this.offset += length; - } - - @Override - public void flush() throws IOException { - } - - @Override - public void close() throws IOException { - } - - @Override - public void reset() throws IOException { - throw new UnsupportedOperationException(); - } - } } diff --git a/core/src/main/java/org/elasticsearch/transport/netty/NettyUtils.java b/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3Utils.java similarity index 96% rename from core/src/main/java/org/elasticsearch/transport/netty/NettyUtils.java rename to modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3Utils.java index f3fdde5e91c..37fc483d4f4 100644 --- a/core/src/main/java/org/elasticsearch/transport/netty/NettyUtils.java +++ b/modules/transport-netty3/src/main/java/org/elasticsearch/transport/netty3/Netty3Utils.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.transport.netty; +package org.elasticsearch.transport.netty3; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefIterator; @@ -34,7 +34,7 @@ import java.util.ArrayList; /** */ -public class NettyUtils { +public class Netty3Utils { /** * Here we go.... 
@@ -96,8 +96,8 @@ public class NettyUtils { InternalLoggerFactory.setDefaultFactory(new InternalLoggerFactory() { @Override public InternalLogger newInstance(String name) { - name = name.replace("org.jboss.netty.", "netty.").replace("org.jboss.netty.", "netty."); - return new NettyInternalESLogger(Loggers.getLogger(name)); + name = name.replace("org.jboss.netty.", "netty3.").replace("org.jboss.netty.", "netty3."); + return new Netty3InternalESLogger(Loggers.getLogger(name)); } }); @@ -105,7 +105,6 @@ public class NettyUtils { } public static void setup() { - } /** diff --git a/core/src/main/java/org/elasticsearch/transport/BaseTransportResponseHandler.java b/modules/transport-netty3/src/main/plugin-metadata/plugin-security.policy similarity index 72% rename from core/src/main/java/org/elasticsearch/transport/BaseTransportResponseHandler.java rename to modules/transport-netty3/src/main/plugin-metadata/plugin-security.policy index df2362dd47b..45c8cd923aa 100644 --- a/core/src/main/java/org/elasticsearch/transport/BaseTransportResponseHandler.java +++ b/modules/transport-netty3/src/main/plugin-metadata/plugin-security.policy @@ -17,11 +17,8 @@ * under the License. */ -package org.elasticsearch.transport; - -/** - * A simple based class that always spawns. - */ -public abstract class BaseTransportResponseHandler implements TransportResponseHandler { - -} \ No newline at end of file +grant { + // Netty SelectorUtil wants to change this, because of https://bugs.openjdk.java.net/browse/JDK-6427854 + // the bug says it only happened rarely, and that its fixed, but apparently it still happens rarely! + permission java.util.PropertyPermission "sun.nio.ch.bugLevel", "write"; +}; \ No newline at end of file diff --git a/modules/transport-netty3/src/test/java/org/elasticsearch/ESNetty3IntegTestCase.java b/modules/transport-netty3/src/test/java/org/elasticsearch/ESNetty3IntegTestCase.java new file mode 100644 index 00000000000..a7f8f254fcc --- /dev/null +++ b/modules/transport-netty3/src/test/java/org/elasticsearch/ESNetty3IntegTestCase.java @@ -0,0 +1,72 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
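For context on the policy file above: Netty 3's SelectorUtil writes the sun.nio.ch.bugLevel system property when it is first loaded, so the plugin forces that load inside a doPrivileged block (see the Netty3Plugin constructor earlier in this patch). The same pattern, restated as a standalone sketch:

    import org.elasticsearch.SpecialPermission;
    import java.security.AccessController;
    import java.security.PrivilegedAction;

    class SelectorUtilLoadExample {
        static void loadSelectorUtil() {
            SecurityManager sm = System.getSecurityManager();
            if (sm != null) {
                sm.checkPermission(new SpecialPermission());
            }
            AccessController.doPrivileged((PrivilegedAction<Void>) () -> {
                try {
                    // touching the class runs its static initializer, which needs the
                    // "sun.nio.ch.bugLevel" write permission granted by the policy above
                    Class.forName("org.jboss.netty.channel.socket.nio.SelectorUtil");
                } catch (ClassNotFoundException e) {
                    throw new AssertionError(e);
                }
                return null;
            });
        }
    }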
+ */ +package org.elasticsearch; + +import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.transport.Netty3Plugin; +import org.elasticsearch.transport.netty3.Netty3Transport; + +import java.util.Collection; + +@ESIntegTestCase.SuppressLocalMode +public abstract class ESNetty3IntegTestCase extends ESIntegTestCase { + + @Override + protected boolean ignoreExternalCluster() { + return true; + } + + @Override + protected boolean addMockTransportService() { + return false; + } + + @Override + protected Settings nodeSettings(int nodeOrdinal) { + Settings.Builder builder = Settings.builder().put(super.nodeSettings(nodeOrdinal)); + // randomize netty settings + if (randomBoolean()) { + builder.put(Netty3Transport.WORKER_COUNT.getKey(), random().nextInt(3) + 1); + } + builder.put(NetworkModule.TRANSPORT_TYPE_KEY, "netty3"); + builder.put(NetworkModule.HTTP_TYPE_KEY, "netty3"); + return builder.build(); + } + + @Override + protected Settings transportClientSettings() { + Settings.Builder builder = Settings.builder().put(super.transportClientSettings()); + builder.put(NetworkModule.TRANSPORT_TYPE_KEY, "netty3"); + return builder.build(); + } + + @Override + protected Collection> nodePlugins() { + return pluginList(Netty3Plugin.class); + } + + @Override + protected Collection> transportClientPlugins() { + return pluginList(Netty3Plugin.class); + } + +} diff --git a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpChannelTests.java b/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3HttpChannelTests.java similarity index 93% rename from core/src/test/java/org/elasticsearch/http/netty/NettyHttpChannelTests.java rename to modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3HttpChannelTests.java index a56e9993434..41ea8612fed 100644 --- a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpChannelTests.java +++ b/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3HttpChannelTests.java @@ -17,7 +17,7 @@ * under the License. 
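A hypothetical integration test built on the new base class; the class and method names here are made up, but nodeSettings() above already wires transport.type and http.type to "netty3", so a subclass only has to exercise the cluster:

    import org.elasticsearch.ESNetty3IntegTestCase;

    public class Netty3SmokeIT extends ESNetty3IntegTestCase {
        public void testClusterFormsOverNetty3() {
            ensureGreen(); // all traffic in this test cluster now flows through netty3
        }
    }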
*/ -package org.elasticsearch.http.netty; +package org.elasticsearch.http.netty3; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -25,7 +25,7 @@ import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.http.HttpTransportSettings; -import org.elasticsearch.http.netty.cors.CorsHandler; +import org.elasticsearch.http.netty3.cors.Netty3CorsHandler; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.RestStatus; @@ -61,12 +61,12 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; -public class NettyHttpChannelTests extends ESTestCase { +public class Netty3HttpChannelTests extends ESTestCase { private NetworkService networkService; private ThreadPool threadPool; private MockBigArrays bigArrays; - private NettyHttpServerTransport httpServerTransport; + private Netty3HttpServerTransport httpServerTransport; @Before public void setup() throws Exception { @@ -159,7 +159,7 @@ public class NettyHttpChannelTests extends ESTestCase { } public void testThatAnyOriginWorks() { - final String originValue = CorsHandler.ANY_ORIGIN; + final String originValue = Netty3CorsHandler.ANY_ORIGIN; Settings settings = Settings.builder() .put(SETTING_CORS_ENABLED.getKey(), true) .put(SETTING_CORS_ALLOW_ORIGIN.getKey(), originValue) @@ -174,14 +174,15 @@ public class NettyHttpChannelTests extends ESTestCase { public void testHeadersSet() { Settings settings = Settings.builder().build(); - httpServerTransport = new NettyHttpServerTransport(settings, networkService, bigArrays, threadPool); + httpServerTransport = new Netty3HttpServerTransport(settings, networkService, bigArrays, threadPool); HttpRequest httpRequest = new TestHttpRequest(); httpRequest.headers().add(HttpHeaders.Names.ORIGIN, "remote"); WriteCapturingChannel writeCapturingChannel = new WriteCapturingChannel(); - NettyHttpRequest request = new NettyHttpRequest(httpRequest, writeCapturingChannel); + Netty3HttpRequest request = new Netty3HttpRequest(httpRequest, writeCapturingChannel); // send a response - NettyHttpChannel channel = new NettyHttpChannel(httpServerTransport, request, null, randomBoolean()); + Netty3HttpChannel channel = + new Netty3HttpChannel(httpServerTransport, request, null, randomBoolean(), threadPool.getThreadContext()); TestReponse resp = new TestReponse(); final String customHeader = "custom-header"; final String customHeaderValue = "xyz"; @@ -200,14 +201,15 @@ public class NettyHttpChannelTests extends ESTestCase { private HttpResponse execRequestWithCors(final Settings settings, final String originValue, final String host) { // construct request and send it over the transport layer - httpServerTransport = new NettyHttpServerTransport(settings, networkService, bigArrays, threadPool); + httpServerTransport = new Netty3HttpServerTransport(settings, networkService, bigArrays, threadPool); HttpRequest httpRequest = new TestHttpRequest(); httpRequest.headers().add(HttpHeaders.Names.ORIGIN, originValue); httpRequest.headers().add(HttpHeaders.Names.HOST, host); WriteCapturingChannel writeCapturingChannel = new WriteCapturingChannel(); - NettyHttpRequest request = new NettyHttpRequest(httpRequest, writeCapturingChannel); + Netty3HttpRequest request = new Netty3HttpRequest(httpRequest, 
writeCapturingChannel); - NettyHttpChannel channel = new NettyHttpChannel(httpServerTransport, request, null, randomBoolean()); + Netty3HttpChannel channel = + new Netty3HttpChannel(httpServerTransport, request, null, randomBoolean(), threadPool.getThreadContext()); channel.sendResponse(new TestReponse()); // get the response diff --git a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpClient.java b/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3HttpClient.java similarity index 96% rename from core/src/test/java/org/elasticsearch/http/netty/NettyHttpClient.java rename to modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3HttpClient.java index 264876b7963..8ed45b10b2e 100644 --- a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpClient.java +++ b/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3HttpClient.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.http.netty; +package org.elasticsearch.http.netty3; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.unit.ByteSizeUnit; @@ -54,9 +54,9 @@ import static org.jboss.netty.handler.codec.http.HttpHeaders.Names.HOST; import static org.jboss.netty.handler.codec.http.HttpVersion.HTTP_1_1; /** - * Tiny helper to send http requests over netty. + * Tiny helper to send http requests over netty3. */ -public class NettyHttpClient implements Closeable { +public class Netty3HttpClient implements Closeable { public static Collection<String> returnHttpResponseBodies(Collection<HttpResponse> responses) { List<String> list = new ArrayList<>(responses.size()); @@ -76,7 +76,7 @@ public class NettyHttpClient implements Closeable { private final ClientBootstrap clientBootstrap; - public NettyHttpClient() { + public Netty3HttpClient() { clientBootstrap = new ClientBootstrap(new NioClientSocketChannelFactory()); } @@ -104,7 +104,7 @@ public class NettyHttpClient implements Closeable { } @SafeVarargs // Safe not because it doesn't do anything with the type parameters but because it won't leak them into other methods. private final Collection<HttpResponse> processRequestsWithBody(HttpMethod method, SocketAddress remoteAddress, Tuple<String, CharSequence>... urisAndBodies) throws InterruptedException { Collection<HttpRequest> requests = new ArrayList<>(urisAndBodies.length); for (Tuple<String, CharSequence> uriAndBody : urisAndBodies) { diff --git a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpPublishPortTests.java b/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3HttpPublishPortTests.java similarity index 95% rename from core/src/test/java/org/elasticsearch/http/netty/NettyHttpPublishPortTests.java rename to modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3HttpPublishPortTests.java index c6e2c93463f..05c7ee36a24 100644 --- a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpPublishPortTests.java +++ b/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3HttpPublishPortTests.java @@ -17,7 +17,7 @@ * under the License.
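A usage sketch for the renamed test helper; get(...) takes the bound socket address plus request URIs, as the pipelining tests below show. The address variable and paths are illustrative, and the calls can propagate InterruptedException:

    import org.elasticsearch.http.netty3.Netty3HttpClient;
    import org.jboss.netty.handler.codec.http.HttpResponse;
    import java.net.SocketAddress;
    import java.util.Collection;

    class HttpClientExample {
        static void fetch(SocketAddress boundAddress) throws Exception {
            try (Netty3HttpClient client = new Netty3HttpClient()) {
                Collection<HttpResponse> responses = client.get(boundAddress, "/", "/_nodes");
                Collection<String> bodies = Netty3HttpClient.returnHttpResponseBodies(responses);
                assert bodies.size() == 2; // one body per request URI
            }
        }
    }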
*/ -package org.elasticsearch.http.netty; +package org.elasticsearch.http.netty3; import org.elasticsearch.common.network.NetworkUtils; import org.elasticsearch.common.settings.Settings; @@ -32,11 +32,11 @@ import java.util.List; import static java.net.InetAddress.getByName; import static java.util.Arrays.asList; -import static org.elasticsearch.http.netty.NettyHttpServerTransport.resolvePublishPort; +import static org.elasticsearch.http.netty3.Netty3HttpServerTransport.resolvePublishPort; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; -public class NettyHttpPublishPortTests extends ESTestCase { +public class Netty3HttpPublishPortTests extends ESTestCase { public void testHttpPublishPort() throws Exception { int boundPort = randomIntBetween(9000, 9100); diff --git a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpRequestSizeLimitIT.java b/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3HttpRequestSizeLimitIT.java similarity index 95% rename from core/src/test/java/org/elasticsearch/http/netty/NettyHttpRequestSizeLimitIT.java rename to modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3HttpRequestSizeLimitIT.java index 47b34811df6..66d9f2c88d1 100644 --- a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpRequestSizeLimitIT.java +++ b/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3HttpRequestSizeLimitIT.java @@ -16,8 +16,9 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.http.netty; +package org.elasticsearch.http.netty3; +import org.elasticsearch.ESNetty3IntegTestCase; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; @@ -26,7 +27,6 @@ import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; -import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; import org.jboss.netty.handler.codec.http.HttpResponse; @@ -45,7 +45,7 @@ import static org.hamcrest.Matchers.hasSize; * a single node "cluster". We also force test infrastructure to use the node client instead of the transport client for the same reason. 
*/ @ClusterScope(scope = Scope.TEST, supportsDedicatedMasters = false, numClientNodes = 0, numDataNodes = 1, transportClientRatio = 0) -public class NettyHttpRequestSizeLimitIT extends ESIntegTestCase { +public class Netty3HttpRequestSizeLimitIT extends ESNetty3IntegTestCase { private static final ByteSizeValue LIMIT = new ByteSizeValue(2, ByteSizeUnit.KB); @Override @@ -81,7 +81,7 @@ public class NettyHttpRequestSizeLimitIT extends ESIntegTestCase { InetSocketTransportAddress inetSocketTransportAddress = (InetSocketTransportAddress) randomFrom(httpServerTransport.boundAddress().boundAddresses()); - try (NettyHttpClient nettyHttpClient = new NettyHttpClient()) { + try (Netty3HttpClient nettyHttpClient = new Netty3HttpClient()) { Collection<HttpResponse> singleResponse = nettyHttpClient.post(inetSocketTransportAddress.address(), requests[0]); assertThat(singleResponse, hasSize(1)); assertAtLeastOnceExpectedStatus(singleResponse, HttpResponseStatus.OK); @@ -106,7 +106,7 @@ public class NettyHttpRequestSizeLimitIT extends ESIntegTestCase { InetSocketTransportAddress inetSocketTransportAddress = (InetSocketTransportAddress) randomFrom(httpServerTransport.boundAddress().boundAddresses()); - try (NettyHttpClient nettyHttpClient = new NettyHttpClient()) { + try (Netty3HttpClient nettyHttpClient = new Netty3HttpClient()) { Collection<HttpResponse> responses = nettyHttpClient.put(inetSocketTransportAddress.address(), requestUris); assertThat(responses, hasSize(requestUris.length)); assertAllInExpectedStatus(responses, HttpResponseStatus.OK); diff --git a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerPipeliningTests.java b/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3HttpServerPipeliningTests.java similarity index 83% rename from core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerPipeliningTests.java rename to modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3HttpServerPipeliningTests.java index 6fc9a4e674a..ebaff71e0c2 100644 --- a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerPipeliningTests.java +++ b/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3HttpServerPipeliningTests.java @@ -16,17 +16,16 @@ * specific language governing permissions and limitations * under the License.
*/ -package org.elasticsearch.http.netty; +package org.elasticsearch.http.netty3; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.http.HttpServerTransport; -import org.elasticsearch.http.netty.NettyHttpServerTransport.HttpChannelPipelineFactory; -import org.elasticsearch.http.netty.pipelining.OrderedDownstreamChannelEvent; -import org.elasticsearch.http.netty.pipelining.OrderedUpstreamMessageEvent; +import org.elasticsearch.http.netty3.Netty3HttpServerTransport.HttpChannelPipelineFactory; +import org.elasticsearch.http.netty3.pipelining.OrderedDownstreamChannelEvent; +import org.elasticsearch.http.netty3.pipelining.OrderedUpstreamMessageEvent; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; @@ -54,7 +53,7 @@ import java.util.List; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; -import static org.elasticsearch.http.netty.NettyHttpClient.returnHttpResponseBodies; +import static org.elasticsearch.http.netty3.Netty3HttpClient.returnHttpResponseBodies; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; @@ -65,11 +64,11 @@ import static org.jboss.netty.handler.codec.http.HttpVersion.HTTP_1_1; /** * This test just checks whether pipelining works in general, without any connection to the Elasticsearch handler */ -public class NettyHttpServerPipeliningTests extends ESTestCase { +public class Netty3HttpServerPipeliningTests extends ESTestCase { private NetworkService networkService; private ThreadPool threadPool; private MockBigArrays bigArrays; - private CustomNettyHttpServerTransport httpServerTransport; + private CustomNetty3HttpServerTransport httpServerTransport; @Before public void setup() throws Exception { @@ -93,12 +92,13 @@ public class NettyHttpServerPipeliningTests extends ESTestCase { .put("http.pipelining", true) .put("http.port", "0") .build(); - httpServerTransport = new CustomNettyHttpServerTransport(settings); + httpServerTransport = new CustomNetty3HttpServerTransport(settings); httpServerTransport.start(); - InetSocketTransportAddress transportAddress = (InetSocketTransportAddress) randomFrom(httpServerTransport.boundAddress().boundAddresses()); + InetSocketTransportAddress transportAddress = (InetSocketTransportAddress) randomFrom(httpServerTransport.boundAddress() + .boundAddresses()); List<String> requests = Arrays.asList("/firstfast", "/slow?sleep=500", "/secondfast", "/slow?sleep=1000", "/thirdfast"); - try (NettyHttpClient nettyHttpClient = new NettyHttpClient()) { + try (Netty3HttpClient nettyHttpClient = new Netty3HttpClient()) { Collection<HttpResponse> responses = nettyHttpClient.get(transportAddress.address(), requests.toArray(new String[]{})); Collection<String> responseBodies = returnHttpResponseBodies(responses); assertThat(responseBodies, contains("/firstfast", "/slow?sleep=500", "/secondfast", "/slow?sleep=1000", "/thirdfast")); @@ -110,12 +110,13 @@ public class NettyHttpServerPipeliningTests extends ESTestCase { .put("http.pipelining", false) .put("http.port", "0") .build(); - httpServerTransport = new CustomNettyHttpServerTransport(settings); + httpServerTransport = new 
CustomNetty3HttpServerTransport(settings); httpServerTransport.start(); - InetSocketTransportAddress transportAddress = (InetSocketTransportAddress) randomFrom(httpServerTransport.boundAddress().boundAddresses()); + InetSocketTransportAddress transportAddress = (InetSocketTransportAddress) randomFrom(httpServerTransport.boundAddress() + .boundAddresses()); List<String> requests = Arrays.asList("/slow?sleep=1000", "/firstfast", "/secondfast", "/thirdfast", "/slow?sleep=500"); - try (NettyHttpClient nettyHttpClient = new NettyHttpClient()) { + try (Netty3HttpClient nettyHttpClient = new Netty3HttpClient()) { Collection<HttpResponse> responses = nettyHttpClient.get(transportAddress.address(), requests.toArray(new String[]{})); List<String> responseBodies = new ArrayList<>(returnHttpResponseBodies(responses)); // we cannot be sure about the order of the fast requests, but the slow ones should be last @@ -125,26 +126,27 @@ public class NettyHttpServerPipeliningTests extends ESTestCase { } } - class CustomNettyHttpServerTransport extends NettyHttpServerTransport { + class CustomNetty3HttpServerTransport extends Netty3HttpServerTransport { private final ExecutorService executorService; - public CustomNettyHttpServerTransport(Settings settings) { - super(settings, NettyHttpServerPipeliningTests.this.networkService, - NettyHttpServerPipeliningTests.this.bigArrays, NettyHttpServerPipeliningTests.this.threadPool + public CustomNetty3HttpServerTransport(Settings settings) { + super(settings, Netty3HttpServerPipeliningTests.this.networkService, + Netty3HttpServerPipeliningTests.this.bigArrays, Netty3HttpServerPipeliningTests.this.threadPool ); this.executorService = Executors.newFixedThreadPool(5); } @Override public ChannelPipelineFactory configureServerChannelPipelineFactory() { - return new CustomHttpChannelPipelineFactory(this, executorService, NettyHttpServerPipeliningTests.this.threadPool.getThreadContext()); + return new CustomHttpChannelPipelineFactory(this, executorService, Netty3HttpServerPipeliningTests.this.threadPool + .getThreadContext()); } @Override - public HttpServerTransport stop() { + public void stop() { executorService.shutdownNow(); - return super.stop(); + super.stop(); } } @@ -152,7 +154,8 @@ public class NettyHttpServerPipeliningTests extends ESTestCase { private final ExecutorService executorService; - public CustomHttpChannelPipelineFactory(NettyHttpServerTransport transport, ExecutorService executorService, ThreadContext threadContext) { + public CustomHttpChannelPipelineFactory(Netty3HttpServerTransport transport, ExecutorService executorService, + ThreadContext threadContext) { super(transport, randomBoolean(), threadContext); this.executorService = executorService; } @@ -214,7 +217,8 @@ public class NettyHttpServerPipeliningTests extends ESTestCase { QueryStringDecoder decoder = new QueryStringDecoder(request.getUri()); - final int timeout = request.getUri().startsWith("/slow") && decoder.getParameters().containsKey("sleep") ? Integer.valueOf(decoder.getParameters().get("sleep").get(0)) : 0; + final int timeout = request.getUri().startsWith("/slow") && decoder.getParameters().containsKey("sleep") + ? 
Integer.valueOf(decoder.getParameters().get("sleep").get(0)) : 0; if (timeout > 0) { try { Thread.sleep(timeout); diff --git a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerTransportTests.java b/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3HttpServerTransportTests.java similarity index 89% rename from core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerTransportTests.java rename to modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3HttpServerTransportTests.java index 3cf9c1aa029..da7e320a557 100644 --- a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpServerTransportTests.java +++ b/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3HttpServerTransportTests.java @@ -17,13 +17,13 @@ * under the License. */ -package org.elasticsearch.http.netty; +package org.elasticsearch.http.netty3; import org.elasticsearch.common.Strings; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.MockBigArrays; -import org.elasticsearch.http.netty.cors.CorsConfig; +import org.elasticsearch.http.netty3.cors.Netty3CorsConfig; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; @@ -45,9 +45,9 @@ import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ENABLED; import static org.hamcrest.Matchers.equalTo; /** - * Tests for the {@link NettyHttpServerTransport} class. + * Tests for the {@link Netty3HttpServerTransport} class. */ -public class NettyHttpServerTransportTests extends ESTestCase { +public class Netty3HttpServerTransportTests extends ESTestCase { private NetworkService networkService; private ThreadPool threadPool; private MockBigArrays bigArrays; @@ -79,8 +79,8 @@ public class NettyHttpServerTransportTests extends ESTestCase { .put(SETTING_CORS_ALLOW_HEADERS.getKey(), Strings.collectionToCommaDelimitedString(headers)) .put(SETTING_CORS_ALLOW_CREDENTIALS.getKey(), true) .build(); - final NettyHttpServerTransport transport = new NettyHttpServerTransport(settings, networkService, bigArrays, threadPool); - final CorsConfig corsConfig = transport.getCorsConfig(); + final Netty3HttpServerTransport transport = new Netty3HttpServerTransport(settings, networkService, bigArrays, threadPool); + final Netty3CorsConfig corsConfig = transport.getCorsConfig(); assertThat(corsConfig.isAnyOriginSupported(), equalTo(true)); assertThat(corsConfig.allowedRequestHeaders(), equalTo(headers)); assertThat(corsConfig.allowedRequestMethods().stream().map(HttpMethod::getName).collect(Collectors.toSet()), equalTo(methods)); diff --git a/core/src/test/java/org/elasticsearch/http/netty/NettyPipeliningDisabledIT.java b/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3PipeliningDisabledIT.java similarity index 90% rename from core/src/test/java/org/elasticsearch/http/netty/NettyPipeliningDisabledIT.java rename to modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3PipeliningDisabledIT.java index 576456c0647..09325e2ed9b 100644 --- a/core/src/test/java/org/elasticsearch/http/netty/NettyPipeliningDisabledIT.java +++ b/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3PipeliningDisabledIT.java @@ -16,14 +16,14 @@ * specific language governing permissions and limitations * under the License. 
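For orientation, the CORS hunks above only rename types; the pattern itself is unchanged, and every settings key and accessor below appears in the hunk. A minimal sketch (the header values are illustrative; networkService, bigArrays and threadPool are the fixtures the test class sets up in @Before):

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;

    import org.elasticsearch.common.Strings;
    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.http.netty3.cors.Netty3CorsConfig;

    import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_HEADERS;
    import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ENABLED;
    import static org.junit.Assert.assertEquals;

    // Sketch: enable CORS with an explicit header whitelist and read back the parsed config.
    Set<String> headers = new HashSet<>(Arrays.asList("X-Requested-With", "Content-Type"));
    Settings settings = Settings.builder()
            .put(SETTING_CORS_ENABLED.getKey(), true)
            .put(SETTING_CORS_ALLOW_HEADERS.getKey(), Strings.collectionToCommaDelimitedString(headers))
            .build();
    Netty3HttpServerTransport transport = new Netty3HttpServerTransport(settings, networkService, bigArrays, threadPool);
    Netty3CorsConfig corsConfig = transport.getCorsConfig();
    assertEquals(headers, corsConfig.allowedRequestHeaders());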
*/ -package org.elasticsearch.http.netty; +package org.elasticsearch.http.netty3; +import org.elasticsearch.ESNetty3IntegTestCase; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.http.HttpServerTransport; -import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; import org.jboss.netty.handler.codec.http.HttpResponse; @@ -33,7 +33,7 @@ import java.util.Collection; import java.util.List; import java.util.Locale; -import static org.elasticsearch.http.netty.NettyHttpClient.returnOpaqueIds; +import static org.elasticsearch.http.netty3.Netty3HttpClient.returnOpaqueIds; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.hasSize; @@ -41,7 +41,7 @@ import static org.hamcrest.Matchers.hasSize; * */ @ClusterScope(scope = Scope.TEST, supportsDedicatedMasters = false, numDataNodes = 1) -public class NettyPipeliningDisabledIT extends ESIntegTestCase { +public class Netty3PipeliningDisabledIT extends ESNetty3IntegTestCase { @Override protected Settings nodeSettings(int nodeOrdinal) { return Settings.builder() @@ -59,7 +59,7 @@ public class NettyPipeliningDisabledIT extends ESIntegTestCase { TransportAddress[] boundAddresses = httpServerTransport.boundAddress().boundAddresses(); InetSocketTransportAddress inetSocketTransportAddress = (InetSocketTransportAddress) randomFrom(boundAddresses); - try (NettyHttpClient nettyHttpClient = new NettyHttpClient()) { + try (Netty3HttpClient nettyHttpClient = new Netty3HttpClient()) { Collection<HttpResponse> responses = nettyHttpClient.get(inetSocketTransportAddress.address(), requests); assertThat(responses, hasSize(requests.length)); diff --git a/core/src/test/java/org/elasticsearch/http/netty/NettyPipeliningEnabledIT.java b/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3PipeliningEnabledIT.java similarity index 57% rename from core/src/test/java/org/elasticsearch/http/netty/NettyPipeliningEnabledIT.java rename to modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3PipeliningEnabledIT.java index 7ca714ece93..4abc24ceba0 100644 --- a/core/src/test/java/org/elasticsearch/http/netty/NettyPipeliningEnabledIT.java +++ b/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/Netty3PipeliningEnabledIT.java @@ -16,46 +16,34 @@ * specific language governing permissions and limitations * under the License.
*/ -package org.elasticsearch.http.netty; +package org.elasticsearch.http.netty3; -import org.elasticsearch.common.network.NetworkModule; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.InetSocketTransportAddress; -import org.elasticsearch.common.transport.TransportAddress; -import org.elasticsearch.http.HttpServerTransport; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.ESIntegTestCase.ClusterScope; -import org.elasticsearch.test.ESIntegTestCase.Scope; +import org.elasticsearch.transport.Netty3Plugin; import org.jboss.netty.handler.codec.http.HttpResponse; +import java.net.InetSocketAddress; import java.util.Collection; import java.util.Locale; -import static org.elasticsearch.http.netty.NettyHttpClient.returnOpaqueIds; +import static org.elasticsearch.http.netty3.Netty3HttpClient.returnOpaqueIds; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; +public class Netty3PipeliningEnabledIT extends ESIntegTestCase { -@ClusterScope(scope = Scope.TEST, supportsDedicatedMasters = false, numDataNodes = 1) -public class NettyPipeliningEnabledIT extends ESIntegTestCase { @Override - protected Settings nodeSettings(int nodeOrdinal) { - return Settings.builder() - .put(super.nodeSettings(nodeOrdinal)) - .put(NetworkModule.HTTP_ENABLED.getKey(), true) - .put("http.pipelining", true) - .build(); + protected Collection<Class<? extends Plugin>> transportClientPlugins() { + return pluginList(Netty3Plugin.class); } public void testThatNettyHttpServerSupportsPipelining() throws Exception { String[] requests = new String[]{"/", "/_nodes/stats", "/", "/_cluster/state", "/"}; - HttpServerTransport httpServerTransport = internalCluster().getInstance(HttpServerTransport.class); - TransportAddress[] boundAddresses = httpServerTransport.boundAddress().boundAddresses(); - InetSocketTransportAddress inetSocketTransportAddress = (InetSocketTransportAddress) randomFrom(boundAddresses); - - try (NettyHttpClient nettyHttpClient = new NettyHttpClient()) { - Collection<HttpResponse> responses = nettyHttpClient.get(inetSocketTransportAddress.address(), requests); + InetSocketAddress inetSocketAddress = randomFrom(cluster().httpAddresses()); + try (Netty3HttpClient nettyHttpClient = new Netty3HttpClient()) { + Collection<HttpResponse> responses = nettyHttpClient.get(inetSocketAddress, requests); assertThat(responses, hasSize(5)); Collection<String> opaqueIds = returnOpaqueIds(responses); diff --git a/core/src/test/java/org/elasticsearch/http/netty/pipelining/HttpPipeliningHandlerTests.java b/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/pipelining/HttpPipeliningHandlerTests.java similarity index 99% rename from core/src/test/java/org/elasticsearch/http/netty/pipelining/HttpPipeliningHandlerTests.java rename to modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/pipelining/HttpPipeliningHandlerTests.java index d914018d4f6..7db2368d344 100644 --- a/core/src/test/java/org/elasticsearch/http/netty/pipelining/HttpPipeliningHandlerTests.java +++ b/modules/transport-netty3/src/test/java/org/elasticsearch/http/netty3/pipelining/HttpPipeliningHandlerTests.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License.
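The enabled and disabled variants above differ only in the order they accept: containsInAnyOrder for the disabled case, strict request order for the enabled one. A small sketch of the ordered check, assuming (as returnOpaqueIds suggests) that each request carries an X-Opaque-Id header echoing its position:

    import java.util.Collection;

    import static org.junit.Assert.assertEquals;

    // with http.pipelining enabled, responses must come back in request order
    static void assertOpaqueIdsInOrder(Collection<String> opaqueIds) {
        int expected = 0;
        for (String opaqueId : opaqueIds) {
            assertEquals(Integer.toString(expected++), opaqueId);
        }
    }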
*/ -package org.elasticsearch.http.netty.pipelining; +package org.elasticsearch.http.netty3.pipelining; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.test.ESTestCase; diff --git a/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java b/modules/transport-netty3/src/test/java/org/elasticsearch/transport/Netty3SizeHeaderFrameDecoderTests.java similarity index 89% rename from core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java rename to modules/transport-netty3/src/test/java/org/elasticsearch/transport/Netty3SizeHeaderFrameDecoderTests.java index f436b719260..e4047798071 100644 --- a/core/src/test/java/org/elasticsearch/transport/NettySizeHeaderFrameDecoderTests.java +++ b/modules/transport-netty3/src/test/java/org/elasticsearch/transport/Netty3SizeHeaderFrameDecoderTests.java @@ -19,11 +19,8 @@ package org.elasticsearch.transport; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkService; -import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.transport.TransportAddress; @@ -32,7 +29,7 @@ import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.netty.NettyTransport; +import org.elasticsearch.transport.netty3.Netty3Transport; import org.junit.After; import org.junit.Before; @@ -48,16 +45,16 @@ import static org.hamcrest.Matchers.is; /** * This test checks that an HTTP look-alike request (starting with an HTTP method and a space) * actually returns a text response instead of just dropping the connection */ -public class NettySizeHeaderFrameDecoderTests extends ESTestCase { +public class Netty3SizeHeaderFrameDecoderTests extends ESTestCase { private final Settings settings = Settings.builder() - .put("node.name", "NettySizeHeaderFrameDecoderTests") + .put("node.name", "Netty3SizeHeaderFrameDecoderTests") .put(TransportSettings.BIND_HOST.getKey(), "127.0.0.1") .put(TransportSettings.PORT.getKey(), "0") .build(); private ThreadPool threadPool; - private NettyTransport nettyTransport; + private Netty3Transport nettyTransport; private int port; private InetAddress host; @@ -66,7 +63,7 @@ public class NettySizeHeaderFrameDecoderTests extends ESTestCase { threadPool = new ThreadPool(settings); NetworkService networkService = new NetworkService(settings); BigArrays bigArrays = new MockBigArrays(Settings.EMPTY, new NoneCircuitBreakerService()); - nettyTransport = new NettyTransport(settings, threadPool, networkService, bigArrays, new NamedWriteableRegistry(), + nettyTransport = new Netty3Transport(settings, threadPool, networkService, bigArrays, new NamedWriteableRegistry(), new NoneCircuitBreakerService()); nettyTransport.start(); TransportService transportService = new TransportService(settings, nettyTransport, threadPool); diff --git a/core/src/test/java/org/elasticsearch/transport/netty/ChannelBufferBytesReferenceTests.java b/modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/ChannelBufferBytesReferenceTests.java similarity index 54% rename from
core/src/test/java/org/elasticsearch/transport/netty/ChannelBufferBytesReferenceTests.java rename to modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/ChannelBufferBytesReferenceTests.java index a284f6ea911..ac40e16afe9 100644 --- a/core/src/test/java/org/elasticsearch/transport/netty/ChannelBufferBytesReferenceTests.java +++ b/modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/ChannelBufferBytesReferenceTests.java @@ -16,13 +16,12 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.transport.netty; +package org.elasticsearch.transport.netty3; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.bytes.AbstractBytesReferenceTestCase; -import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.ReleasableBytesStreamOutput; -import org.elasticsearch.transport.netty.NettyUtils; import org.jboss.netty.buffer.ChannelBuffer; import org.jboss.netty.buffer.ChannelBuffers; @@ -38,25 +37,42 @@ public class ChannelBufferBytesReferenceTests extends AbstractBytesReferenceTest assertEquals(out.size(), length); BytesReference ref = out.bytes(); assertEquals(ref.length(), length); - BytesArray bytesArray = ref.toBytesArray(); - return NettyUtils.toBytesReference(ChannelBuffers.wrappedBuffer(bytesArray.array(), bytesArray.arrayOffset(), - bytesArray.length())); + BytesRef bytesRef = ref.toBytesRef(); + final ChannelBuffer channelBuffer = ChannelBuffers.wrappedBuffer(bytesRef.bytes, bytesRef.offset, bytesRef.length); + return Netty3Utils.toBytesReference(channelBuffer); } public void testSliceOnAdvancedBuffer() throws IOException { BytesReference bytesReference = newBytesReference(randomIntBetween(10, 3 * PAGE_SIZE)); - BytesArray bytesArray = bytesReference.toBytesArray(); - - ChannelBuffer channelBuffer = ChannelBuffers.wrappedBuffer(bytesArray.array(), bytesArray.arrayOffset(), - bytesArray.length()); + BytesRef bytesRef = bytesReference.toBytesRef(); + ChannelBuffer channelBuffer = ChannelBuffers.wrappedBuffer(bytesRef.bytes, bytesRef.offset, + bytesRef.length); int numBytesToRead = randomIntBetween(1, 5); for (int i = 0; i < numBytesToRead; i++) { channelBuffer.readByte(); } - BytesReference other = NettyUtils.toBytesReference(channelBuffer); + BytesReference other = Netty3Utils.toBytesReference(channelBuffer); BytesReference slice = bytesReference.slice(numBytesToRead, bytesReference.length() - numBytesToRead); assertEquals(other, slice); - assertEquals(other.slice(3, 1), slice.slice(3, 1)); } + + public void testImmutable() throws IOException { + BytesReference bytesReference = newBytesReference(randomIntBetween(10, 3 * PAGE_SIZE)); + BytesRef bytesRef = BytesRef.deepCopyOf(bytesReference.toBytesRef()); + ChannelBuffer channelBuffer = ChannelBuffers.wrappedBuffer(bytesRef.bytes, bytesRef.offset, + bytesRef.length); + ChannelBufferBytesReference channelBufferBytesReference = new ChannelBufferBytesReference(channelBuffer, bytesRef.length); + assertEquals(channelBufferBytesReference, bytesReference); + channelBuffer.readInt(); // this advances the index of the channel buffer + assertEquals(channelBufferBytesReference, bytesReference); + assertEquals(bytesRef, channelBufferBytesReference.toBytesRef()); + + BytesRef unicodeBytes = new BytesRef(randomUnicodeOfCodepointLength(100)); + channelBuffer = ChannelBuffers.wrappedBuffer(unicodeBytes.bytes, unicodeBytes.offset, unicodeBytes.length); + 
channelBufferBytesReference = new ChannelBufferBytesReference(channelBuffer, unicodeBytes.length); + String utf8ToString = channelBufferBytesReference.utf8ToString(); + channelBuffer.readInt(); // this advances the index of the channel buffer + assertEquals(utf8ToString, channelBufferBytesReference.utf8ToString()); + } } diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyScheduledPingTests.java b/modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/Netty3ScheduledPingTests.java similarity index 93% rename from core/src/test/java/org/elasticsearch/transport/netty/NettyScheduledPingTests.java rename to modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/Netty3ScheduledPingTests.java index c69f56c2cbd..16694dbed0b 100644 --- a/core/src/test/java/org/elasticsearch/transport/netty/NettyScheduledPingTests.java +++ b/modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/Netty3ScheduledPingTests.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.transport.netty; +package org.elasticsearch.transport.netty3; import org.elasticsearch.Version; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -31,7 +31,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.BaseTransportResponseHandler; +import org.elasticsearch.transport.TransportResponseHandler; import org.elasticsearch.transport.TcpTransport; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportException; @@ -49,9 +49,7 @@ import static java.util.Collections.emptySet; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; -/** - */ -public class NettyScheduledPingTests extends ESTestCase { +public class Netty3ScheduledPingTests extends ESTestCase { public void testScheduledPing() throws Exception { ThreadPool threadPool = new TestThreadPool(getClass().getName()); @@ -64,14 +62,14 @@ public class NettyScheduledPingTests extends ESTestCase { CircuitBreakerService circuitBreakerService = new NoneCircuitBreakerService(); NamedWriteableRegistry registryA = new NamedWriteableRegistry(); - final NettyTransport nettyA = new NettyTransport(settings, threadPool, new NetworkService(settings), + final Netty3Transport nettyA = new Netty3Transport(settings, threadPool, new NetworkService(settings), BigArrays.NON_RECYCLING_INSTANCE, registryA, circuitBreakerService); MockTransportService serviceA = new MockTransportService(settings, nettyA, threadPool); serviceA.start(); serviceA.acceptIncomingRequests(); NamedWriteableRegistry registryB = new NamedWriteableRegistry(); - final NettyTransport nettyB = new NettyTransport(settings, threadPool, new NetworkService(settings), + final Netty3Transport nettyB = new Netty3Transport(settings, threadPool, new NetworkService(settings), BigArrays.NON_RECYCLING_INSTANCE, registryB, circuitBreakerService); MockTransportService serviceB = new MockTransportService(settings, nettyB, threadPool); @@ -114,7 +112,7 @@ public class NettyScheduledPingTests extends ESTestCase { for (int i = 0; i < rounds; i++) { serviceB.submitRequest(nodeA, "sayHello", TransportRequest.Empty.INSTANCE, TransportRequestOptions.builder().withCompress(randomBoolean()).build(), - new BaseTransportResponseHandler() { + new 
TransportResponseHandler<TransportResponse.Empty>() { @Override public TransportResponse.Empty newInstance() { return TransportResponse.Empty.INSTANCE; diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java b/modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/Netty3TransportIT.java similarity index 82% rename from core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java rename to modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/Netty3TransportIT.java index 310f804ef7c..f2863c5b5d2 100644 --- a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java +++ b/modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/Netty3TransportIT.java @@ -16,8 +16,9 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.transport.netty; +package org.elasticsearch.transport.netty3; +import org.elasticsearch.ESNetty3IntegTestCase; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; @@ -26,49 +27,45 @@ import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.indices.breaker.CircuitBreakerService; -import org.elasticsearch.node.Node; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportSettings; import org.jboss.netty.channel.Channel; -import org.jboss.netty.channel.ChannelPipeline; -import org.jboss.netty.channel.ChannelPipelineFactory; import java.io.IOException; import java.net.InetSocketAddress; +import java.util.ArrayList; import java.util.Collection; import java.util.Collections; +import java.util.List; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; -/** - * - */ @ClusterScope(scope = Scope.TEST, supportsDedicatedMasters = false, numDataNodes = 1) -public class NettyTransportIT extends ESIntegTestCase { +public class Netty3TransportIT extends ESNetty3IntegTestCase { // static so we can use it in anonymous classes private static String channelProfileName = null; @Override protected Settings nodeSettings(int nodeOrdinal) { return Settings.builder().put(super.nodeSettings(nodeOrdinal)) - .put(Node.NODE_MODE_SETTING.getKey(), "network") .put(NetworkModule.TRANSPORT_TYPE_KEY, "exception-throwing").build(); } @Override protected Collection<Class<? extends Plugin>> nodePlugins() { - return pluginList(ExceptionThrowingNettyTransport.TestPlugin.class); + List<Class<? extends Plugin>> list = new ArrayList<>(); + list.add(ExceptionThrowingNetty3Transport.TestPlugin.class); + list.addAll(super.nodePlugins()); + return Collections.unmodifiableCollection(list); } public void testThatConnectionFailsAsIntended() throws Exception { @@ -84,18 +81,22 @@ public class NettyTransportIT extends ESIntegTestCase { } } - public static final class ExceptionThrowingNettyTransport 
extends NettyTransport { + public static final class ExceptionThrowingNetty3Transport extends Netty3Transport { public static class TestPlugin extends Plugin { public void onModule(NetworkModule module) { - module.registerTransport("exception-throwing", ExceptionThrowingNettyTransport.class); + module.registerTransport("exception-throwing", ExceptionThrowingNetty3Transport.class); } } @Inject - public ExceptionThrowingNettyTransport(Settings settings, ThreadPool threadPool, NetworkService networkService, BigArrays bigArrays, - NamedWriteableRegistry namedWriteableRegistry, - CircuitBreakerService circuitBreakerService) { + public ExceptionThrowingNetty3Transport( + Settings settings, + ThreadPool threadPool, + NetworkService networkService, + BigArrays bigArrays, + NamedWriteableRegistry namedWriteableRegistry, + CircuitBreakerService circuitBreakerService) { super(settings, threadPool, networkService, bigArrays, namedWriteableRegistry, circuitBreakerService); } diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortIntegrationIT.java b/modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/Netty3TransportMultiPortIntegrationIT.java similarity index 91% rename from core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortIntegrationIT.java rename to modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/Netty3TransportMultiPortIntegrationIT.java index 0189b73799a..8b9b2d397db 100644 --- a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortIntegrationIT.java +++ b/modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/Netty3TransportMultiPortIntegrationIT.java @@ -16,8 +16,9 @@ * specific language governing permissions and limitations * under the License. 
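The TestPlugin above is the complete recipe for swapping a custom transport into a test: register the implementation under a type key through NetworkModule, then point each node's transport type at that key. Both halves appear in the hunks; they are repeated here side by side only to make the pairing explicit:

    // inside the plugin:
    public void onModule(NetworkModule module) {
        module.registerTransport("exception-throwing", ExceptionThrowingNetty3Transport.class);
    }

    // inside nodeSettings(int nodeOrdinal):
    return Settings.builder().put(super.nodeSettings(nodeOrdinal))
            .put(NetworkModule.TRANSPORT_TYPE_KEY, "exception-throwing").build();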
*/ -package org.elasticsearch.transport.netty; +package org.elasticsearch.transport.netty3; +import org.elasticsearch.ESNetty3IntegTestCase; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; @@ -30,11 +31,11 @@ import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.env.Environment; -import org.elasticsearch.node.Node; -import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.test.junit.annotations.Network; +import org.elasticsearch.transport.MockTransportClient; +import org.elasticsearch.transport.Netty3Plugin; import java.net.InetAddress; import java.util.Locale; @@ -48,7 +49,7 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThanOrEqualTo; @ClusterScope(scope = Scope.SUITE, supportsDedicatedMasters = false, numDataNodes = 1, numClientNodes = 0) -public class NettyTransportMultiPortIntegrationIT extends ESIntegTestCase { +public class Netty3TransportMultiPortIntegrationIT extends ESNetty3IntegTestCase { private static int randomPort = -1; private static String randomPortRange; @@ -62,8 +63,6 @@ public class NettyTransportMultiPortIntegrationIT extends ESIntegTestCase { Settings.Builder builder = Settings.builder() .put(super.nodeSettings(nodeOrdinal)) .put("network.host", "127.0.0.1") - .put(NetworkModule.TRANSPORT_TYPE_KEY, "netty") - .put(Node.NODE_MODE_SETTING.getKey(), "network") .put("transport.profiles.client1.port", randomPortRange) .put("transport.profiles.client1.publish_host", "127.0.0.7") .put("transport.profiles.client1.publish_port", "4321") @@ -74,10 +73,10 @@ public class NettyTransportMultiPortIntegrationIT extends ESIntegTestCase { public void testThatTransportClientCanConnect() throws Exception { Settings settings = Settings.builder() .put("cluster.name", internalCluster().getClusterName()) - .put(NetworkModule.TRANSPORT_TYPE_KEY, "netty") + .put(NetworkModule.TRANSPORT_TYPE_KEY, "netty3") .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); - try (TransportClient transportClient = TransportClient.builder().settings(settings).build()) { + try (TransportClient transportClient = new MockTransportClient(settings, Netty3Plugin.class)) { transportClient.addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName("127.0.0.1"), randomPort)); ClusterHealthResponse response = transportClient.admin().cluster().prepareHealth().get(); assertThat(response.getStatus(), is(ClusterHealthStatus.GREEN)); diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortTests.java b/modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/Netty3TransportMultiPortTests.java similarity index 96% rename from core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortTests.java rename to modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/Netty3TransportMultiPortTests.java index 352c90d2317..f21edf3f596 100644 --- a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportMultiPortTests.java +++ 
b/modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/Netty3TransportMultiPortTests.java @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.transport.netty; +package org.elasticsearch.transport.netty3; import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -36,7 +36,7 @@ import org.junit.Before; import static org.hamcrest.Matchers.is; -public class NettyTransportMultiPortTests extends ESTestCase { +public class Netty3TransportMultiPortTests extends ESTestCase { private String host; @@ -135,7 +135,7 @@ public class NettyTransportMultiPortTests extends ESTestCase { private TcpTransport startTransport(Settings settings, ThreadPool threadPool) { BigArrays bigArrays = new MockBigArrays(Settings.EMPTY, new NoneCircuitBreakerService()); - TcpTransport transport = new NettyTransport(settings, threadPool, new NetworkService(settings), bigArrays, + TcpTransport transport = new Netty3Transport(settings, threadPool, new NetworkService(settings), bigArrays, new NamedWriteableRegistry(), new NoneCircuitBreakerService()); transport.start(); diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportPublishAddressIT.java b/modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/Netty3TransportPublishAddressIT.java similarity index 88% rename from core/src/test/java/org/elasticsearch/transport/netty/NettyTransportPublishAddressIT.java rename to modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/Netty3TransportPublishAddressIT.java index 75faa8c49b4..a936ad7d191 100644 --- a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportPublishAddressIT.java +++ b/modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/Netty3TransportPublishAddressIT.java @@ -17,17 +17,16 @@ * under the License. */ -package org.elasticsearch.transport.netty; +package org.elasticsearch.transport.netty3; +import org.elasticsearch.ESNetty3IntegTestCase; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; -import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.network.NetworkUtils; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.transport.TransportAddress; -import org.elasticsearch.node.Node; import org.elasticsearch.test.ESIntegTestCase; import java.net.Inet4Address; @@ -41,14 +40,7 @@ import static org.hamcrest.Matchers.instanceOf; * different ports on ipv4 and ipv6. 
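Because the netty3 transport now lives in a module rather than core, a transport client must be handed the plugin explicitly, which is exactly what the MockTransportClient change above does. In isolation the pattern looks like this (clusterName and port are placeholders for the values the test reads from the running cluster):

    import java.net.InetAddress;

    import org.elasticsearch.client.transport.TransportClient;
    import org.elasticsearch.common.network.NetworkModule;
    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.common.transport.InetSocketTransportAddress;
    import org.elasticsearch.transport.MockTransportClient;
    import org.elasticsearch.transport.Netty3Plugin;

    Settings settings = Settings.builder()
            .put("cluster.name", clusterName)                      // placeholder
            .put(NetworkModule.TRANSPORT_TYPE_KEY, "netty3")
            .build();
    try (TransportClient client = new MockTransportClient(settings, Netty3Plugin.class)) {
        client.addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName("127.0.0.1"), port)); // placeholder
        // requests now go over the netty3 transport
    }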
*/ @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) -public class NettyTransportPublishAddressIT extends ESIntegTestCase { - @Override - protected Settings nodeSettings(int nodeOrdinal) { - return Settings.builder() - .put(super.nodeSettings(nodeOrdinal)) - .put(NetworkModule.TRANSPORT_TYPE_KEY, "netty") - .put(Node.NODE_MODE_SETTING.getKey(), "network").build(); - } +public class Netty3TransportPublishAddressIT extends ESNetty3IntegTestCase { public void testDifferentPorts() throws Exception { if (!NetworkUtils.SUPPORTS_V6) { diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyUtilsTests.java b/modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/Netty3UtilsTests.java similarity index 69% rename from core/src/test/java/org/elasticsearch/transport/netty/NettyUtilsTests.java rename to modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/Netty3UtilsTests.java index fa8f30249bb..8188569e3f4 100644 --- a/core/src/test/java/org/elasticsearch/transport/netty/NettyUtilsTests.java +++ b/modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/Netty3UtilsTests.java @@ -16,8 +16,10 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.transport.netty; +package org.elasticsearch.transport.netty3; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.bytes.AbstractBytesReferenceTestCase; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.ReleasableBytesStreamOutput; @@ -30,13 +32,13 @@ import org.jboss.netty.buffer.CompositeChannelBuffer; import java.io.IOException; -public class NettyUtilsTests extends ESTestCase { +public class Netty3UtilsTests extends ESTestCase { private static final int PAGE_SIZE = BigArrays.BYTE_PAGE_SIZE; private final BigArrays bigarrays = new BigArrays(null, new NoneCircuitBreakerService(), false); public void testToChannelBufferWithEmptyRef() throws IOException { - ChannelBuffer channelBuffer = NettyUtils.toChannelBuffer(getRandomizedBytesReference(0)); + ChannelBuffer channelBuffer = Netty3Utils.toChannelBuffer(getRandomizedBytesReference(0)); assertSame(ChannelBuffers.EMPTY_BUFFER, channelBuffer); } @@ -45,30 +47,31 @@ public class NettyUtilsTests extends ESTestCase { int sliceOffset = randomIntBetween(0, ref.length()); int sliceLength = randomIntBetween(ref.length() - sliceOffset, ref.length() - sliceOffset); BytesReference slice = ref.slice(sliceOffset, sliceLength); - ChannelBuffer channelBuffer = NettyUtils.toChannelBuffer(slice); - BytesReference bytesReference = NettyUtils.toBytesReference(channelBuffer); - assertArrayEquals(slice.toBytes(), bytesReference.toBytes()); + ChannelBuffer channelBuffer = Netty3Utils.toChannelBuffer(slice); + BytesReference bytesReference = Netty3Utils.toBytesReference(channelBuffer); + assertArrayEquals(BytesReference.toBytes(slice), BytesReference.toBytes(bytesReference)); } public void testToChannelBufferWithSliceAfter() throws IOException { BytesReference ref = getRandomizedBytesReference(randomIntBetween(1, 3 * PAGE_SIZE)); int sliceOffset = randomIntBetween(0, ref.length()); int sliceLength = randomIntBetween(ref.length() - sliceOffset, ref.length() - sliceOffset); - ChannelBuffer channelBuffer = NettyUtils.toChannelBuffer(ref); - BytesReference bytesReference = NettyUtils.toBytesReference(channelBuffer); - assertArrayEquals(ref.slice(sliceOffset, 
sliceLength).toBytes(), bytesReference.slice(sliceOffset, sliceLength).toBytes()); + ChannelBuffer channelBuffer = Netty3Utils.toChannelBuffer(ref); + BytesReference bytesReference = Netty3Utils.toBytesReference(channelBuffer); + assertArrayEquals(BytesReference.toBytes(ref.slice(sliceOffset, sliceLength)), + BytesReference.toBytes(bytesReference.slice(sliceOffset, sliceLength))); } public void testToChannelBuffer() throws IOException { BytesReference ref = getRandomizedBytesReference(randomIntBetween(1, 3 * PAGE_SIZE)); - ChannelBuffer channelBuffer = NettyUtils.toChannelBuffer(ref); - BytesReference bytesReference = NettyUtils.toBytesReference(channelBuffer); + ChannelBuffer channelBuffer = Netty3Utils.toChannelBuffer(ref); + BytesReference bytesReference = Netty3Utils.toBytesReference(channelBuffer); if (ref instanceof ChannelBufferBytesReference) { assertEquals(channelBuffer, ((ChannelBufferBytesReference) ref).toChannelBuffer()); - } else if (ref.hasArray() == false) { // we gather the buffers into a channel buffer + } else if (AbstractBytesReferenceTestCase.getNumPages(ref) > 1) { // we gather the buffers into a channel buffer assertTrue(channelBuffer instanceof CompositeChannelBuffer); } - assertArrayEquals(ref.toBytes(), bytesReference.toBytes()); + assertArrayEquals(BytesReference.toBytes(ref), BytesReference.toBytes(bytesReference)); } private BytesReference getRandomizedBytesReference(int length) throws IOException { @@ -81,13 +84,14 @@ public class NettyUtilsTests extends ESTestCase { BytesReference ref = out.bytes(); assertEquals(ref.length(), length); if (randomBoolean()) { - return ref.toBytesArray(); + return new BytesArray(ref.toBytesRef()); } else if (randomBoolean()) { - BytesArray bytesArray = ref.toBytesArray(); - return NettyUtils.toBytesReference(ChannelBuffers.wrappedBuffer(bytesArray.array(), bytesArray.arrayOffset(), - bytesArray.length())); + BytesRef bytesRef = ref.toBytesRef(); + return Netty3Utils.toBytesReference(ChannelBuffers.wrappedBuffer(bytesRef.bytes, bytesRef.offset, + bytesRef.length)); } else { return ref; } } + } diff --git a/core/src/test/java/org/elasticsearch/transport/netty/SimpleNettyTransportTests.java b/modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/SimpleNetty3TransportTests.java similarity index 67% rename from core/src/test/java/org/elasticsearch/transport/netty/SimpleNettyTransportTests.java rename to modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/SimpleNetty3TransportTests.java index 96275d099db..db400f4df43 100644 --- a/core/src/test/java/org/elasticsearch/transport/netty/SimpleNettyTransportTests.java +++ b/modules/transport-netty3/src/test/java/org/elasticsearch/transport/netty3/SimpleNetty3TransportTests.java @@ -17,17 +17,21 @@ * under the License. 
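Much of the byte-handling churn in these hunks is one mechanical migration: the BytesReference instance methods toBytesArray() and toBytes() are gone, so callers go through a Lucene BytesRef and the static BytesReference.toBytes instead. The round trip the tests above exercise, condensed (ref stands for any BytesReference under test):

    import org.apache.lucene.util.BytesRef;
    import org.elasticsearch.common.bytes.BytesReference;
    import org.jboss.netty.buffer.ChannelBuffer;
    import org.jboss.netty.buffer.ChannelBuffers;

    import static org.junit.Assert.assertArrayEquals;

    // wrap a BytesReference into a netty3 ChannelBuffer and convert it back
    BytesRef bytesRef = ref.toBytesRef();
    ChannelBuffer buffer = ChannelBuffers.wrappedBuffer(bytesRef.bytes, bytesRef.offset, bytesRef.length);
    BytesReference roundTripped = Netty3Utils.toBytesReference(buffer);
    assertArrayEquals(BytesReference.toBytes(ref), BytesReference.toBytes(roundTripped));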
*/ -package org.elasticsearch.transport.netty; +package org.elasticsearch.transport.netty3; import org.elasticsearch.Version; -import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.transport.MockTransportService; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.AbstractSimpleTransportTestCase; import org.elasticsearch.transport.ConnectTransportException; +import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportSettings; import java.net.InetAddress; @@ -37,12 +41,26 @@ import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; import static org.hamcrest.Matchers.containsString; -public class SimpleNettyTransportTests extends AbstractSimpleTransportTestCase { +public class SimpleNetty3TransportTests extends AbstractSimpleTransportTestCase { + + public static MockTransportService nettyFromThreadPool( + Settings settings, + ThreadPool threadPool, final Version version) { + NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(); + Transport transport = new Netty3Transport(settings, threadPool, new NetworkService(settings), BigArrays.NON_RECYCLING_INSTANCE, + namedWriteableRegistry, new NoneCircuitBreakerService()) { + @Override + protected Version getCurrentVersion() { + return version; + } + }; + return new MockTransportService(Settings.EMPTY, transport, threadPool); + } @Override protected MockTransportService build(Settings settings, Version version) { settings = Settings.builder().put(settings).put(TransportSettings.PORT.getKey(), "0").build(); - MockTransportService transportService = MockTransportService.nettyFromThreadPool(settings, threadPool, version); + MockTransportService transportService = nettyFromThreadPool(settings, threadPool, version); transportService.start(); return transportService; } diff --git a/modules/transport-netty3/src/test/resources/rest-api-spec/test/10_basic.yaml b/modules/transport-netty3/src/test/resources/rest-api-spec/test/10_basic.yaml new file mode 100644 index 00000000000..eaf51de4484 --- /dev/null +++ b/modules/transport-netty3/src/test/resources/rest-api-spec/test/10_basic.yaml @@ -0,0 +1,13 @@ +# Integration tests for Netty transport +# +"Netty loaded": + - do: + cluster.state: {} + + # Get master node id + - set: { master_node: master } + + - do: + nodes.info: {} + + - match: { nodes.$master.modules.0.name: transport-netty3 } \ No newline at end of file diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java index e04724ee370..eac3ceebc16 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java @@ -40,9 +40,6 @@ import java.util.HashMap; import java.util.Map; import java.util.stream.Collectors; - -/** - */ public class IcuTokenizerFactory extends AbstractTokenizerFactory { private final ICUTokenizerConfig config; @@ 
-101,8 +98,8 @@ public class IcuTokenizerFactory extends AbstractTokenizerFactory { }; return config; } - } catch (Throwable t) { - throw new ElasticsearchException("failed to load ICU rule files", t); + } catch (Exception e) { + throw new ElasticsearchException("failed to load ICU rule files", e); } } diff --git a/plugins/analysis-icu/src/test/resources/rest-api-spec/test/analysis_icu/20_search.yaml b/plugins/analysis-icu/src/test/resources/rest-api-spec/test/analysis_icu/20_search.yaml index 7fe93458af9..dce852f07ef 100644 --- a/plugins/analysis-icu/src/test/resources/rest-api-spec/test/analysis_icu/20_search.yaml +++ b/plugins/analysis-icu/src/test/resources/rest-api-spec/test/analysis_icu/20_search.yaml @@ -24,9 +24,6 @@ text: type: text analyzer: my_analyzer - - do: - cluster.health: - wait_for_status: yellow - do: index: diff --git a/plugins/analysis-kuromoji/src/test/resources/rest-api-spec/test/analysis_kuromoji/20_search.yaml b/plugins/analysis-kuromoji/src/test/resources/rest-api-spec/test/analysis_kuromoji/20_search.yaml index 77d2aaef80d..490025dda66 100644 --- a/plugins/analysis-kuromoji/src/test/resources/rest-api-spec/test/analysis_kuromoji/20_search.yaml +++ b/plugins/analysis-kuromoji/src/test/resources/rest-api-spec/test/analysis_kuromoji/20_search.yaml @@ -12,9 +12,6 @@ text: type: text analyzer: kuromoji - - do: - cluster.health: - wait_for_status: yellow - do: index: diff --git a/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/phonetic/HaasePhonetik.java b/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/phonetic/HaasePhonetik.java index 728a9354d97..e8b49a50edb 100644 --- a/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/phonetic/HaasePhonetik.java +++ b/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/phonetic/HaasePhonetik.java @@ -37,9 +37,9 @@ package org.elasticsearch.index.analysis.phonetic; */ public class HaasePhonetik extends KoelnerPhonetik { - private final static String[] HAASE_VARIATIONS_PATTERNS = {"OWN", "RB", "WSK", "A$", "O$", "SCH", + private static final String[] HAASE_VARIATIONS_PATTERNS = {"OWN", "RB", "WSK", "A$", "O$", "SCH", "GLI", "EAU$", "^CH", "AUX", "EUX", "ILLE"}; - private final static String[] HAASE_VARIATIONS_REPLACEMENTS = {"AUN", "RW", "RSK", "AR", "OW", "CH", + private static final String[] HAASE_VARIATIONS_REPLACEMENTS = {"AUN", "RW", "RSK", "AR", "OW", "CH", "LI", "O", "SCH", "O", "O", "I"}; @Override diff --git a/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/10_metaphone.yaml b/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/10_metaphone.yaml index 5e45c0a6241..02d4b315b6e 100644 --- a/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/10_metaphone.yaml +++ b/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/10_metaphone.yaml @@ -19,9 +19,6 @@ type: phonetic encoder: metaphone replace: false - - do: - cluster.health: - wait_for_status: yellow - do: indices.analyze: index: phonetic_sample diff --git a/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/20_double_metaphone.yaml b/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/20_double_metaphone.yaml index 6481d37bd87..675847e557e 100644 --- a/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/20_double_metaphone.yaml +++ 
b/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/20_double_metaphone.yaml @@ -19,9 +19,6 @@ type: phonetic encoder: double_metaphone max_code_len: 6 - - do: - cluster.health: - wait_for_status: yellow - do: indices.analyze: index: phonetic_sample diff --git a/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/30_beider_morse.yaml b/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/30_beider_morse.yaml index 68b3f764983..015610af172 100644 --- a/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/30_beider_morse.yaml +++ b/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/30_beider_morse.yaml @@ -21,9 +21,6 @@ rule_type: exact name_type: ashkenazi languageset: polish - - do: - cluster.health: - wait_for_status: yellow - do: indices.analyze: index: phonetic_sample diff --git a/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/40_search.yaml b/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/40_search.yaml index fa57d916fc0..933abaafc36 100644 --- a/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/40_search.yaml +++ b/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/40_search.yaml @@ -24,9 +24,6 @@ text: type: text analyzer: my_analyzer - - do: - cluster.health: - wait_for_status: yellow - do: index: diff --git a/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/50_daitch_mokotoff.yaml b/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/50_daitch_mokotoff.yaml index b95138f2646..5125ae3d684 100644 --- a/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/50_daitch_mokotoff.yaml +++ b/plugins/analysis-phonetic/src/test/resources/rest-api-spec/test/analysis_phonetic/50_daitch_mokotoff.yaml @@ -18,9 +18,6 @@ daitch_mokotoff: type: phonetic encoder: daitch_mokotoff - - do: - cluster.health: - wait_for_status: yellow - do: indices.analyze: index: phonetic_sample diff --git a/plugins/analysis-smartcn/src/test/resources/rest-api-spec/test/analysis_smartcn/20_search.yaml b/plugins/analysis-smartcn/src/test/resources/rest-api-spec/test/analysis_smartcn/20_search.yaml index 5aec77f8ce2..4101bb1646c 100644 --- a/plugins/analysis-smartcn/src/test/resources/rest-api-spec/test/analysis_smartcn/20_search.yaml +++ b/plugins/analysis-smartcn/src/test/resources/rest-api-spec/test/analysis_smartcn/20_search.yaml @@ -12,9 +12,6 @@ text: type: text analyzer: smartcn - - do: - cluster.health: - wait_for_status: yellow - do: index: diff --git a/plugins/analysis-stempel/src/test/resources/rest-api-spec/test/analysis_stempel/20_search.yaml b/plugins/analysis-stempel/src/test/resources/rest-api-spec/test/analysis_stempel/20_search.yaml index f699440cb4f..d23a7e368b5 100644 --- a/plugins/analysis-stempel/src/test/resources/rest-api-spec/test/analysis_stempel/20_search.yaml +++ b/plugins/analysis-stempel/src/test/resources/rest-api-spec/test/analysis_stempel/20_search.yaml @@ -12,9 +12,6 @@ text: type: text analyzer: polish - - do: - cluster.health: - wait_for_status: yellow - do: index: diff --git a/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/cloud/azure/classic/management/AzureComputeServiceImpl.java 
b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/cloud/azure/classic/management/AzureComputeServiceImpl.java index 7d6f81ad0b6..2375db2502b 100644 --- a/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/cloud/azure/classic/management/AzureComputeServiceImpl.java +++ b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/cloud/azure/classic/management/AzureComputeServiceImpl.java @@ -39,7 +39,7 @@ import java.util.ServiceLoader; /** * */ -public class AzureComputeServiceImpl extends AbstractLifecycleComponent<AzureComputeService> +public class AzureComputeServiceImpl extends AbstractLifecycleComponent implements AzureComputeService { private final ComputeManagementClient client; diff --git a/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/plugin/discovery/azure/classic/AzureDiscoveryPlugin.java b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/plugin/discovery/azure/classic/AzureDiscoveryPlugin.java index a7e1816fff0..4c0ac173315 100644 --- a/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/plugin/discovery/azure/classic/AzureDiscoveryPlugin.java +++ b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/plugin/discovery/azure/classic/AzureDiscoveryPlugin.java @@ -51,7 +51,7 @@ public class AzureDiscoveryPlugin extends Plugin { } @Override - public Collection<Module> nodeModules() { + public Collection<Module> createGuiceModules() { return Collections.singletonList((Module) new AzureDiscoveryModule(settings)); } diff --git a/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/cloud/azure/classic/AbstractAzureComputeServiceTestCase.java b/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/cloud/azure/classic/AbstractAzureComputeServiceTestCase.java index e9d16408537..bc5ff91f812 100644 --- a/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/cloud/azure/classic/AbstractAzureComputeServiceTestCase.java +++ b/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/cloud/azure/classic/AbstractAzureComputeServiceTestCase.java @@ -43,9 +43,7 @@ public abstract class AbstractAzureComputeServiceTestCase extends ESIntegTestCase protected Settings nodeSettings(int nodeOrdinal) { Settings.Builder builder = Settings.builder() .put(super.nodeSettings(nodeOrdinal)) - .put("discovery.type", "azure") - // We need the network to make the mock working - .put(Node.NODE_MODE_SETTING.getKey(), "network"); + .put("discovery.type", "azure"); // We add a fake subscription_id to start mock compute service builder.put(Management.SUBSCRIPTION_ID_SETTING.getKey(), "fake") diff --git a/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/cloud/azure/classic/management/AzureComputeServiceAbstractMock.java b/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/cloud/azure/classic/management/AzureComputeServiceAbstractMock.java index 33f40a9159a..02de9db6d69 100644 --- a/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/cloud/azure/classic/management/AzureComputeServiceAbstractMock.java +++ b/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/cloud/azure/classic/management/AzureComputeServiceAbstractMock.java @@ -26,7 +26,7 @@ import org.elasticsearch.common.settings.Settings; /** * */ -public abstract class AzureComputeServiceAbstractMock extends AbstractLifecycleComponent<AzureComputeService> +public abstract class AzureComputeServiceAbstractMock extends AbstractLifecycleComponent implements AzureComputeService { protected AzureComputeServiceAbstractMock(Settings settings) { diff --git 
a/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureSimpleTests.java b/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureSimpleTests.java index d2234632122..d6d68e75843 100644 --- a/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureSimpleTests.java +++ b/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureSimpleTests.java @@ -29,10 +29,7 @@ import org.elasticsearch.test.ESIntegTestCase; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.notNullValue; -@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, - numDataNodes = 0, - transportClientRatio = 0.0, - numClientNodes = 0) +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, transportClientRatio = 0.0, numClientNodes = 0) public class AzureSimpleTests extends AbstractAzureComputeServiceTestCase { public AzureSimpleTests() { super(AzureComputeServiceSimpleMock.TestPlugin.class); diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java index 5ce110487c4..649f84b7aed 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java @@ -35,8 +35,6 @@ import com.amazonaws.services.ec2.AmazonEC2; import com.amazonaws.services.ec2.AmazonEC2Client; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cloud.aws.network.Ec2NameResolver; -import org.elasticsearch.cloud.aws.node.Ec2CustomNodeAttributes; -import org.elasticsearch.cluster.node.DiscoveryNodeService; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractLifecycleComponent; @@ -49,18 +47,17 @@ import java.util.Random; /** * */ -public class AwsEc2ServiceImpl extends AbstractLifecycleComponent implements AwsEc2Service { +public class AwsEc2ServiceImpl extends AbstractLifecycleComponent implements AwsEc2Service { public static final String EC2_METADATA_URL = "http://169.254.169.254/latest/meta-data/"; private AmazonEC2Client client; @Inject - public AwsEc2ServiceImpl(Settings settings, NetworkService networkService, DiscoveryNodeService discoveryNodeService) { + public AwsEc2ServiceImpl(Settings settings, NetworkService networkService) { super(settings); // add specific ec2 name resolver networkService.addCustomNameResolver(new Ec2NameResolver(settings)); - discoveryNodeService.addCustomAttributeProvider(new Ec2CustomNodeAttributes(settings)); } @Override diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/node/Ec2CustomNodeAttributes.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/node/Ec2CustomNodeAttributes.java deleted file mode 100644 index 9ba1ce650e8..00000000000 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/node/Ec2CustomNodeAttributes.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.cloud.aws.node; - -import org.apache.lucene.util.IOUtils; -import org.elasticsearch.cloud.aws.AwsEc2Service; -import org.elasticsearch.cloud.aws.AwsEc2ServiceImpl; -import org.elasticsearch.cluster.node.DiscoveryNodeService; -import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.Settings; - -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.net.URL; -import java.net.URLConnection; -import java.nio.charset.StandardCharsets; -import java.util.HashMap; -import java.util.Map; - -/** - */ -public class Ec2CustomNodeAttributes extends AbstractComponent implements DiscoveryNodeService.CustomAttributesProvider { - - public Ec2CustomNodeAttributes(Settings settings) { - super(settings); - } - - @Override - public Map buildAttributes() { - if (AwsEc2Service.AUTO_ATTRIBUTE_SETTING.get(settings) == false) { - return null; - } - Map ec2Attributes = new HashMap<>(); - - URLConnection urlConnection; - InputStream in = null; - try { - URL url = new URL(AwsEc2ServiceImpl.EC2_METADATA_URL + "placement/availability-zone"); - logger.debug("obtaining ec2 [placement/availability-zone] from ec2 meta-data url {}", url); - urlConnection = url.openConnection(); - urlConnection.setConnectTimeout(2000); - in = urlConnection.getInputStream(); - BufferedReader urlReader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8)); - - String metadataResult = urlReader.readLine(); - if (metadataResult == null || metadataResult.length() == 0) { - logger.error("no ec2 metadata returned from {}", url); - return null; - } - ec2Attributes.put("aws_availability_zone", metadataResult); - } catch (IOException e) { - logger.debug("failed to get metadata for [placement/availability-zone]", e); - } finally { - IOUtils.closeWhileHandlingException(in); - } - - return ec2Attributes; - } -} diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/plugin/discovery/ec2/Ec2DiscoveryPlugin.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/plugin/discovery/ec2/Ec2DiscoveryPlugin.java index b5f179102a7..eb4afbcf784 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/plugin/discovery/ec2/Ec2DiscoveryPlugin.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/plugin/discovery/ec2/Ec2DiscoveryPlugin.java @@ -19,6 +19,21 @@ package org.elasticsearch.plugin.discovery.ec2; +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.UncheckedIOException; +import java.net.URL; +import java.net.URLConnection; +import java.nio.charset.StandardCharsets; +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; + import 
org.elasticsearch.SpecialPermission; import org.elasticsearch.cloud.aws.AwsEc2Service; import org.elasticsearch.cloud.aws.AwsEc2ServiceImpl; @@ -29,24 +44,19 @@ import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.discovery.ec2.AwsEc2UnicastHostsProvider; import org.elasticsearch.discovery.zen.ZenDiscovery; +import org.elasticsearch.node.Node; import org.elasticsearch.plugins.Plugin; -import java.security.AccessController; -import java.security.PrivilegedAction; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.List; - /** * */ public class Ec2DiscoveryPlugin extends Plugin { + private static ESLogger logger = Loggers.getLogger(Ec2DiscoveryPlugin.class); + public static final String EC2 = "ec2"; // ClientConfiguration clinit has some classloader problems @@ -69,8 +79,14 @@ public class Ec2DiscoveryPlugin extends Plugin { }); } + private Settings settings; + + public Ec2DiscoveryPlugin(Settings settings) { + this.settings = settings; + } + @Override - public Collection nodeModules() { + public Collection createGuiceModules() { Collection modules = new ArrayList<>(); modules.add(new Ec2Module()); return modules; @@ -78,7 +94,7 @@ public class Ec2DiscoveryPlugin extends Plugin { @Override @SuppressWarnings("rawtypes") // Supertype uses rawtype - public Collection> nodeServices() { + public Collection> getGuiceServiceClasses() { Collection> services = new ArrayList<>(); services.add(AwsEc2ServiceImpl.class); return services; @@ -123,4 +139,46 @@ public class Ec2DiscoveryPlugin extends Plugin { // Register cloud node settings: cloud.node AwsEc2Service.AUTO_ATTRIBUTE_SETTING); } + + /** Adds a node attribute for the ec2 availability zone. 
*/ + @Override + public Settings additionalSettings() { + return getAvailabilityZoneNodeAttributes(settings, AwsEc2ServiceImpl.EC2_METADATA_URL + "placement/availability-zone"); + } + + // pkg private for testing + static Settings getAvailabilityZoneNodeAttributes(Settings settings, String azMetadataUrl) { + if (AwsEc2Service.AUTO_ATTRIBUTE_SETTING.get(settings) == false) { + return Settings.EMPTY; + } + Settings.Builder attrs = Settings.builder(); + + final URL url; + final URLConnection urlConnection; + try { + url = new URL(azMetadataUrl); + logger.debug("obtaining ec2 [placement/availability-zone] from ec2 meta-data url {}", url); + urlConnection = url.openConnection(); + urlConnection.setConnectTimeout(2000); + } catch (IOException e) { + // should not happen, we know the url is not malformed, and openConnection does not actually hit network + throw new UncheckedIOException(e); + } + + try (InputStream in = urlConnection.getInputStream(); + BufferedReader urlReader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8))) { + + String metadataResult = urlReader.readLine(); + if (metadataResult == null || metadataResult.length() == 0) { + throw new IllegalStateException("no ec2 metadata returned from " + url); + } else { + attrs.put(Node.NODE_ATTRIBUTES.getKey() + "aws_availability_zone", metadataResult); + } + } catch (IOException e) { + // this is lenient so the plugin does not fail when installed outside of ec2 + logger.error("failed to get metadata for [placement/availability-zone]", e); + } + + return attrs.build(); + } } diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/AWSSignersTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/AWSSignersTests.java index c313f4bd195..555e9f5c10a 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/AWSSignersTests.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/cloud/aws/AWSSignersTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.cloud.aws; import com.amazonaws.ClientConfiguration; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugin.discovery.ec2.Ec2DiscoveryPlugin; import org.elasticsearch.test.ESTestCase; import org.junit.BeforeClass; @@ -34,7 +35,7 @@ public class AWSSignersTests extends ESTestCase { */ @BeforeClass public static void instantiatePlugin() { - new Ec2DiscoveryPlugin(); + new Ec2DiscoveryPlugin(Settings.EMPTY); } public void testSigners() { diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceMock.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceMock.java index f14a80f01cc..19a5c8d3b96 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceMock.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceMock.java @@ -27,7 +27,7 @@ import org.elasticsearch.common.settings.Settings; import java.util.List; -public class AwsEc2ServiceMock extends AbstractLifecycleComponent implements AwsEc2Service { +public class AwsEc2ServiceMock extends AbstractLifecycleComponent implements AwsEc2Service { private int nodes; private List> tagsList; diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryClusterFormationTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryClusterFormationTests.java index 54f70d80cfb..ebada53025c 100644 --- 
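The availability-zone lookup above is a one-line HTTP read against the instance metadata service with a short connect timeout, kept lenient so the plugin still loads off EC2. A minimal standalone sketch of that probe, assuming only the metadata URL shown in the diff (error handling simplified for illustration):

```java
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URL;
import java.net.URLConnection;
import java.nio.charset.StandardCharsets;

public class AvailabilityZoneProbe {
    // Metadata endpoint used by the plugin; only resolvable from inside EC2.
    static final String AZ_URL = "http://169.254.169.254/latest/meta-data/placement/availability-zone";

    /** Returns the availability zone, or null when the metadata service is unreachable. */
    static String readAvailabilityZone() {
        try {
            URLConnection conn = new URL(AZ_URL).openConnection();
            conn.setConnectTimeout(2000); // fail fast when not running on EC2, as the plugin does
            try (BufferedReader reader = new BufferedReader(
                    new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
                return reader.readLine(); // the service answers with a single line such as "us-east-1c"
            }
        } catch (IOException e) {
            return null; // lenient fallback; the plugin logs the failure and adds no attribute
        }
    }
}
```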
a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryClusterFormationTests.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryClusterFormationTests.java @@ -22,6 +22,8 @@ package org.elasticsearch.discovery.ec2; import com.amazonaws.util.IOUtils; import com.sun.net.httpserver.Headers; import com.sun.net.httpserver.HttpServer; +import org.apache.http.NameValuePair; +import org.apache.http.client.utils.URLEncodedUtils; import org.elasticsearch.cloud.aws.AwsEc2Service; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.io.FileSystemUtils; @@ -30,7 +32,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugin.discovery.ec2.Ec2DiscoveryPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; -import org.jboss.netty.handler.codec.http.QueryStringDecoder; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -49,7 +50,6 @@ import java.nio.file.Path; import java.util.Collection; import java.util.Collections; import java.util.List; -import java.util.Map; import java.util.UUID; import java.util.concurrent.ExecutionException; @@ -101,9 +101,13 @@ public class Ec2DiscoveryClusterFormationTests extends ESIntegTestCase { httpServer.createContext("/", (s) -> { Headers headers = s.getResponseHeaders(); headers.add("Content-Type", "text/xml; charset=UTF-8"); - QueryStringDecoder decoder = new QueryStringDecoder("?" + IOUtils.toString(s.getRequestBody())); - Map> queryParams = decoder.getParameters(); - String action = queryParams.get("Action").get(0); + String action = null; + for (NameValuePair parse : URLEncodedUtils.parse(IOUtils.toString(s.getRequestBody()), StandardCharsets.UTF_8)) { + if ("Action".equals(parse.getName())) { + action = parse.getValue(); + break; + } + } assertThat(action, equalTo("DescribeInstances")); XMLOutputFactory xmlOutputFactory = XMLOutputFactory.newFactory(); diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/plugin/discovery/ec2/Ec2DiscoveryPluginTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/plugin/discovery/ec2/Ec2DiscoveryPluginTests.java new file mode 100644 index 00000000000..14ba0d2339a --- /dev/null +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/plugin/discovery/ec2/Ec2DiscoveryPluginTests.java @@ -0,0 +1,83 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
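The cluster-formation test above swaps Netty's QueryStringDecoder for Apache HttpClient's URLEncodedUtils when pulling the Action parameter out of the form-encoded request body. A short sketch of that lookup, with a made-up sample body:

```java
import java.nio.charset.StandardCharsets;
import java.util.List;

import org.apache.http.NameValuePair;
import org.apache.http.client.utils.URLEncodedUtils;

public class ActionLookup {
    /** Returns the value of the "Action" parameter, or null if absent. */
    static String findAction(String requestBody) {
        // parse(..) splits "k1=v1&k2=v2" into URL-decoded name/value pairs
        List<NameValuePair> params = URLEncodedUtils.parse(requestBody, StandardCharsets.UTF_8);
        for (NameValuePair pair : params) {
            if ("Action".equals(pair.getName())) {
                return pair.getValue();
            }
        }
        return null;
    }

    public static void main(String[] args) {
        System.out.println(findAction("Action=DescribeInstances&Version=2015-10-01")); // DescribeInstances
    }
}
```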
+ */ + +package org.elasticsearch.plugin.discovery.ec2; + +import java.io.UncheckedIOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Arrays; + +import org.elasticsearch.cloud.aws.AwsEc2Service; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.node.Node; +import org.elasticsearch.test.ESTestCase; + +public class Ec2DiscoveryPluginTests extends ESTestCase { + + private Settings getNodeAttributes(Settings settings, String url) { + Settings realSettings = Settings.builder() + .put(AwsEc2Service.AUTO_ATTRIBUTE_SETTING.getKey(), true) + .put(settings).build(); + return Ec2DiscoveryPlugin.getAvailabilityZoneNodeAttributes(realSettings, url); + } + + private void assertNodeAttributes(Settings settings, String url, String expected) { + Settings additional = getNodeAttributes(settings, url); + if (expected == null) { + assertTrue(additional.isEmpty()); + } else { + assertEquals(expected, additional.get(Node.NODE_ATTRIBUTES.getKey() + "aws_availability_zone")); + } + } + + public void testNodeAttributesDisabled() { + Settings settings = Settings.builder() + .put(AwsEc2Service.AUTO_ATTRIBUTE_SETTING.getKey(), false).build(); + assertNodeAttributes(settings, "bogus", null); + } + + public void testNodeAttributes() throws Exception { + Path zoneUrl = createTempFile(); + Files.write(zoneUrl, Arrays.asList("us-east-1c")); + assertNodeAttributes(Settings.EMPTY, zoneUrl.toUri().toURL().toString(), "us-east-1c"); + } + + public void testNodeAttributesBogusUrl() { + UncheckedIOException e = expectThrows(UncheckedIOException.class, () -> + getNodeAttributes(Settings.EMPTY, "bogus") + ); + assertNotNull(e.getCause()); + String msg = e.getCause().getMessage(); + assertTrue(msg, msg.contains("no protocol: bogus")); + } + + public void testNodeAttributesEmpty() throws Exception { + Path zoneUrl = createTempFile(); + IllegalStateException e = expectThrows(IllegalStateException.class, () -> + getNodeAttributes(Settings.EMPTY, zoneUrl.toUri().toURL().toString()) + ); + assertTrue(e.getMessage(), e.getMessage().contains("no ec2 metadata returned")); + } + + public void testNodeAttributesErrorLenient() throws Exception { + Path dne = createTempDir().resolve("dne"); + assertNodeAttributes(Settings.EMPTY, dne.toUri().toURL().toString(), null); + } +} diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesService.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesService.java index 5d0d405fbc8..39db86c672a 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesService.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesService.java @@ -30,7 +30,7 @@ import java.util.Collections; import java.util.List; import java.util.function.Function; -public interface GceInstancesService extends LifecycleComponent { +public interface GceInstancesService extends LifecycleComponent { /** * GCE API Version: Elasticsearch/GceCloud/1.0 diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesServiceImpl.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesServiceImpl.java index ff96aca3453..c91797761e9 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesServiceImpl.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesServiceImpl.java @@ -49,8 +49,7 @@ import java.util.Collections; import java.util.List; import 
java.util.function.Function; -public class GceInstancesServiceImpl extends AbstractLifecycleComponent - implements GceInstancesService { +public class GceInstancesServiceImpl extends AbstractLifecycleComponent implements GceInstancesService { // all settings just used for testing - not registered by default public static final Setting GCE_VALIDATE_CERTIFICATES = diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java index de02ff8a01d..c73df8f8395 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java @@ -249,8 +249,8 @@ public class GceUnicastHostsProvider extends AbstractComponent implements Unicas } } - } catch (Throwable e) { - logger.warn("Exception caught during discovery: {}", e, e.getMessage()); + } catch (Exception e) { + logger.warn("exception caught during discovery", e); } logger.debug("{} node(s) added", cachedDiscoNodes.size()); diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java index 2b55e038a98..59bf05e3269 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java @@ -78,18 +78,17 @@ public class GceDiscoveryPlugin extends Plugin { } @Override - public Collection nodeModules() { + public Collection createGuiceModules() { return Collections.singletonList(new GceModule(settings)); } @Override @SuppressWarnings("rawtypes") // Supertype uses raw type - public Collection> nodeServices() { + public Collection> getGuiceServiceClasses() { logger.debug("Register gce compute and metadata services"); - Collection> services = new ArrayList<>(); - services.add(GceModule.getComputeServiceImpl()); - services.add(GceModule.getMetadataServiceImpl()); - return services; + return Arrays.asList( + GceModule.getComputeServiceImpl(), + GceModule.getMetadataServiceImpl()); } public void onModule(DiscoveryModule discoveryModule) { diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapperTests.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapperTests.java index aa6e91fabdf..a49124749fe 100644 --- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapperTests.java +++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/RetryHttpInitializerWrapperTests.java @@ -44,7 +44,7 @@ import static org.hamcrest.Matchers.lessThan; public class RetryHttpInitializerWrapperTests extends ESTestCase { - static private class FailThenSuccessBackoffTransport extends MockHttpTransport { + private static class FailThenSuccessBackoffTransport extends MockHttpTransport { public int lowLevelExecCalls; int errorStatusCode; diff --git a/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/AttachmentProcessor.java b/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/AttachmentProcessor.java index 40ebe2592ab..4036fb0d688 100644 ---
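The GCE plugin above now spells out its Guice surface through the renamed hooks. A rough sketch of the shape a plugin takes after this rename, with the generic return types written out in full (FakeModule is an invented stand-in, not part of the diff):

```java
import java.util.Collection;
import java.util.Collections;

import org.elasticsearch.common.component.LifecycleComponent;
import org.elasticsearch.common.inject.Binder;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.plugins.Plugin;

public class FakeDiscoveryPlugin extends Plugin {

    // Invented placeholder for a real binding module.
    static class FakeModule implements Module {
        @Override
        public void configure(Binder binder) {}
    }

    @Override
    public Collection<Module> createGuiceModules() { // was nodeModules()
        return Collections.singletonList(new FakeModule());
    }

    @Override
    @SuppressWarnings("rawtypes") // supertype uses the raw type
    public Collection<Class<? extends LifecycleComponent>> getGuiceServiceClasses() { // was nodeServices()
        return Collections.emptyList(); // a real plugin lists its lifecycle service classes here
    }
}
```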
a/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/AttachmentProcessor.java +++ b/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/AttachmentProcessor.java @@ -25,8 +25,8 @@ import org.apache.tika.metadata.TikaCoreProperties; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Strings; import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; import java.io.IOException; import java.util.Arrays; @@ -122,7 +122,7 @@ public final class AttachmentProcessor extends AbstractProcessor { String length = Strings.hasLength(contentLength) ? contentLength : String.valueOf(parsedContent.length()); additionalFields.put(Property.CONTENT_LENGTH.toLowerCase(), length); } - } catch (Throwable e) { + } catch (Exception e) { throw new ElasticsearchParseException("Error parsing document in field [{}]", e, field); } @@ -150,12 +150,13 @@ public final class AttachmentProcessor extends AbstractProcessor { return indexedChars; } - public static final class Factory extends AbstractProcessorFactory { + public static final class Factory implements Processor.Factory { static final Set DEFAULT_PROPERTIES = EnumSet.allOf(Property.class); @Override - public AttachmentProcessor doCreate(String processorTag, Map config) throws Exception { + public AttachmentProcessor create(Map registry, String processorTag, + Map config) throws Exception { String field = readStringProperty(TYPE, processorTag, config, "field"); String targetField = readStringProperty(TYPE, processorTag, config, "target_field", "attachment"); List properyNames = readOptionalList(TYPE, processorTag, config, "properties"); diff --git a/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/IngestAttachmentPlugin.java b/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/IngestAttachmentPlugin.java index 3156fe381fd..7846f0ad28c 100644 --- a/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/IngestAttachmentPlugin.java +++ b/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/IngestAttachmentPlugin.java @@ -19,15 +19,17 @@ package org.elasticsearch.ingest.attachment; -import org.elasticsearch.node.NodeModule; +import java.util.Collections; +import java.util.Map; + +import org.elasticsearch.ingest.Processor; +import org.elasticsearch.plugins.IngestPlugin; import org.elasticsearch.plugins.Plugin; -import java.io.IOException; +public class IngestAttachmentPlugin extends Plugin implements IngestPlugin { -public class IngestAttachmentPlugin extends Plugin { - - public void onModule(NodeModule nodeModule) throws IOException { - nodeModule.registerProcessor(AttachmentProcessor.TYPE, - (registry) -> new AttachmentProcessor.Factory()); + @Override + public Map getProcessors(Processor.Parameters parameters) { + return Collections.singletonMap(AttachmentProcessor.TYPE, new AttachmentProcessor.Factory()); } } diff --git a/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorFactoryTests.java b/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorFactoryTests.java index 6bd4e07702e..2848d6c3c64 100644 --- a/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorFactoryTests.java +++ 
b/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorFactoryTests.java @@ -20,7 +20,7 @@ package org.elasticsearch.ingest.attachment; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.ingest.AbstractProcessorFactory; +import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; @@ -46,9 +46,8 @@ public class AttachmentProcessorFactoryTests extends ESTestCase { config.put("field", "_field"); String processorTag = randomAsciiOfLength(10); - config.put(AbstractProcessorFactory.TAG_KEY, processorTag); - AttachmentProcessor processor = factory.create(config); + AttachmentProcessor processor = factory.create(null, processorTag, config); assertThat(processor.getTag(), equalTo(processorTag)); assertThat(processor.getField(), equalTo("_field")); assertThat(processor.getTargetField(), equalTo("attachment")); @@ -62,8 +61,7 @@ public class AttachmentProcessorFactoryTests extends ESTestCase { config.put("indexed_chars", indexedChars); String processorTag = randomAsciiOfLength(10); - config.put(AbstractProcessorFactory.TAG_KEY, processorTag); - AttachmentProcessor processor = factory.create(config); + AttachmentProcessor processor = factory.create(null, processorTag, config); assertThat(processor.getTag(), equalTo(processorTag)); assertThat(processor.getIndexedChars(), is(indexedChars)); } @@ -72,7 +70,7 @@ public class AttachmentProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "_field"); config.put("target_field", "_field"); - AttachmentProcessor processor = factory.create(config); + AttachmentProcessor processor = factory.create(null, null, config); assertThat(processor.getField(), equalTo("_field")); assertThat(processor.getTargetField(), equalTo("_field")); } @@ -89,7 +87,7 @@ public class AttachmentProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "_field"); config.put("properties", fieldNames); - AttachmentProcessor processor = factory.create(config); + AttachmentProcessor processor = factory.create(null, null, config); assertThat(processor.getField(), equalTo("_field")); assertThat(processor.getProperties(), equalTo(properties)); } @@ -99,7 +97,7 @@ public class AttachmentProcessorFactoryTests extends ESTestCase { config.put("field", "_field"); config.put("properties", Collections.singletonList("invalid")); try { - factory.create(config); + factory.create(null, null, config); fail("exception expected"); } catch (ElasticsearchParseException e) { assertThat(e.getMessage(), containsString("[properties] illegal field option [invalid]")); @@ -113,7 +111,7 @@ public class AttachmentProcessorFactoryTests extends ESTestCase { config.put("field", "_field"); config.put("properties", "invalid"); try { - factory.create(config); + factory.create(null, null, config); fail("exception expected"); } catch (ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[properties] property isn't a list, but of type [java.lang.String]")); diff --git a/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/TikaDocTests.java b/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/TikaDocTests.java index 0c63f65c247..4b9a40dd8a9 100644 --- a/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/TikaDocTests.java +++ b/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/TikaDocTests.java @@ -58,7 +58,7 @@ 
public class TikaDocTests extends ESTestCase { assertNotNull(parsedContent); assertFalse(parsedContent.isEmpty()); logger.debug("extracted content: {}", parsedContent); - } catch (Throwable e) { + } catch (Exception e) { throw new RuntimeException("parsing of filename: " + fileName.getFileName() + " failed", e); } } diff --git a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java index 0b18cae25e7..5923e3b690e 100644 --- a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java +++ b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpProcessor.java @@ -19,25 +19,6 @@ package org.elasticsearch.ingest.geoip; -import com.maxmind.geoip2.DatabaseReader; -import com.maxmind.geoip2.exception.AddressNotFoundException; -import com.maxmind.geoip2.model.CityResponse; -import com.maxmind.geoip2.model.CountryResponse; -import com.maxmind.geoip2.record.City; -import com.maxmind.geoip2.record.Continent; -import com.maxmind.geoip2.record.Country; -import com.maxmind.geoip2.record.Location; -import com.maxmind.geoip2.record.Subdivision; -import org.apache.lucene.util.IOUtils; -import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.SpecialPermission; -import org.elasticsearch.common.network.InetAddresses; -import org.elasticsearch.common.network.NetworkAddress; -import org.elasticsearch.ingest.AbstractProcessor; -import org.elasticsearch.ingest.AbstractProcessorFactory; -import org.elasticsearch.ingest.IngestDocument; - -import java.io.Closeable; import java.io.IOException; import java.net.InetAddress; import java.security.AccessController; @@ -51,6 +32,23 @@ import java.util.Locale; import java.util.Map; import java.util.Set; +import com.maxmind.geoip2.DatabaseReader; +import com.maxmind.geoip2.exception.AddressNotFoundException; +import com.maxmind.geoip2.model.CityResponse; +import com.maxmind.geoip2.model.CountryResponse; +import com.maxmind.geoip2.record.City; +import com.maxmind.geoip2.record.Continent; +import com.maxmind.geoip2.record.Country; +import com.maxmind.geoip2.record.Location; +import com.maxmind.geoip2.record.Subdivision; +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.SpecialPermission; +import org.elasticsearch.common.network.InetAddresses; +import org.elasticsearch.common.network.NetworkAddress; +import org.elasticsearch.ingest.AbstractProcessor; +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; + import static org.elasticsearch.ingest.ConfigurationUtils.newConfigurationException; import static org.elasticsearch.ingest.ConfigurationUtils.readOptionalList; import static org.elasticsearch.ingest.ConfigurationUtils.readStringProperty; @@ -217,7 +215,7 @@ public final class GeoIpProcessor extends AbstractProcessor { return geoData; } - public static final class Factory extends AbstractProcessorFactory { + public static final class Factory implements Processor.Factory { static final Set DEFAULT_CITY_PROPERTIES = EnumSet.of( Property.CONTINENT_NAME, Property.COUNTRY_ISO_CODE, Property.REGION_NAME, Property.CITY_NAME, Property.LOCATION @@ -231,7 +229,8 @@ public final class GeoIpProcessor extends AbstractProcessor { } @Override - public GeoIpProcessor doCreate(String processorTag, Map config) throws Exception { + public GeoIpProcessor create(Map registry, String processorTag, + Map config) throws Exception { String ipField = 
readStringProperty(TYPE, processorTag, config, "field"); String targetField = readStringProperty(TYPE, processorTag, config, "target_field", "geoip"); String databaseFile = readStringProperty(TYPE, processorTag, config, "database_file", "GeoLite2-City.mmdb.gz"); @@ -239,7 +238,8 @@ public final class GeoIpProcessor extends AbstractProcessor { DatabaseReader databaseReader = databaseReaders.get(databaseFile); if (databaseReader == null) { - throw newConfigurationException(TYPE, processorTag, "database_file", "database file [" + databaseFile + "] doesn't exist"); + throw newConfigurationException(TYPE, processorTag, + "database_file", "database file [" + databaseFile + "] doesn't exist"); } String databaseType = databaseReader.getMetadata().getDatabaseType(); @@ -272,7 +272,7 @@ public final class GeoIpProcessor extends AbstractProcessor { // Geoip2's AddressNotFoundException is checked and due to the fact that we need run their code // inside a PrivilegedAction code block, we are forced to catch any checked exception and rethrow // it with an unchecked exception. - private final static class AddressNotFoundRuntimeException extends RuntimeException { + private static final class AddressNotFoundRuntimeException extends RuntimeException { public AddressNotFoundRuntimeException(Throwable cause) { super(cause); diff --git a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java index d814ae46bea..2190036c7fc 100644 --- a/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java +++ b/plugins/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java @@ -19,11 +19,6 @@ package org.elasticsearch.ingest.geoip; -import com.maxmind.geoip2.DatabaseReader; -import org.apache.lucene.util.IOUtils; -import org.elasticsearch.node.NodeModule; -import org.elasticsearch.plugins.Plugin; - import java.io.Closeable; import java.io.IOException; import java.io.InputStream; @@ -38,20 +33,31 @@ import java.util.Map; import java.util.stream.Stream; import java.util.zip.GZIPInputStream; -public class IngestGeoIpPlugin extends Plugin implements Closeable { +import com.maxmind.geoip2.DatabaseReader; +import org.apache.lucene.util.IOUtils; +import org.elasticsearch.ingest.Processor; +import org.elasticsearch.plugins.IngestPlugin; +import org.elasticsearch.plugins.Plugin; + +public class IngestGeoIpPlugin extends Plugin implements IngestPlugin, Closeable { private Map databaseReaders; - public void onModule(NodeModule nodeModule) throws IOException { + @Override + public Map getProcessors(Processor.Parameters parameters) { if (databaseReaders != null) { throw new IllegalStateException("called onModule twice for geoip plugin!!"); } - Path geoIpConfigDirectory = nodeModule.getNode().getEnvironment().configFile().resolve("ingest-geoip"); - databaseReaders = loadDatabaseReaders(geoIpConfigDirectory); - nodeModule.registerProcessor(GeoIpProcessor.TYPE, (registry) -> new GeoIpProcessor.Factory(databaseReaders)); + Path geoIpConfigDirectory = parameters.env.configFile().resolve("ingest-geoip"); + try { + databaseReaders = loadDatabaseReaders(geoIpConfigDirectory); + } catch (IOException e) { + throw new RuntimeException(e); + } + return Collections.singletonMap(GeoIpProcessor.TYPE, new GeoIpProcessor.Factory(databaseReaders)); } - public static Map loadDatabaseReaders(Path geoIpConfigDirectory) throws IOException { + static Map loadDatabaseReaders(Path 
geoIpConfigDirectory) throws IOException { if (Files.exists(geoIpConfigDirectory) == false && Files.isDirectory(geoIpConfigDirectory)) { throw new IllegalStateException("the geoip directory [" + geoIpConfigDirectory + "] containing databases doesn't exist"); } diff --git a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java index 04729162729..ec4db09cd96 100644 --- a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java +++ b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/GeoIpProcessorFactoryTests.java @@ -23,7 +23,6 @@ import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.maxmind.geoip2.DatabaseReader; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Randomness; -import org.elasticsearch.ingest.AbstractProcessorFactory; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.StreamsUtils; import org.junit.AfterClass; @@ -74,11 +73,9 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "_field"); - String processorTag = randomAsciiOfLength(10); - config.put(AbstractProcessorFactory.TAG_KEY, processorTag); - GeoIpProcessor processor = factory.create(config); + GeoIpProcessor processor = factory.create(null, processorTag, config); assertThat(processor.getTag(), equalTo(processorTag)); assertThat(processor.getField(), equalTo("_field")); assertThat(processor.getTargetField(), equalTo("geoip")); @@ -92,11 +89,10 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "_field"); config.put("database_file", "GeoLite2-Country.mmdb.gz"); - String processorTag = randomAsciiOfLength(10); - config.put(AbstractProcessorFactory.TAG_KEY, processorTag); - GeoIpProcessor processor = factory.create(config); + GeoIpProcessor processor = factory.create(null, processorTag, config); + assertThat(processor.getTag(), equalTo(processorTag)); assertThat(processor.getField(), equalTo("_field")); assertThat(processor.getTargetField(), equalTo("geoip")); @@ -109,7 +105,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "_field"); config.put("target_field", "_field"); - GeoIpProcessor processor = factory.create(config); + GeoIpProcessor processor = factory.create(null, null, config); assertThat(processor.getField(), equalTo("_field")); assertThat(processor.getTargetField(), equalTo("_field")); } @@ -119,7 +115,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "_field"); config.put("database_file", "GeoLite2-Country.mmdb.gz"); - GeoIpProcessor processor = factory.create(config); + GeoIpProcessor processor = factory.create(null, null, config); assertThat(processor.getField(), equalTo("_field")); assertThat(processor.getTargetField(), equalTo("geoip")); assertThat(processor.getDbReader().getMetadata().getDatabaseType(), equalTo("GeoLite2-Country")); @@ -135,7 +131,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { String cityProperty = RandomPicks.randomFrom(Randomness.get(), cityOnlyProperties).toString(); config.put("properties", Collections.singletonList(cityProperty)); try { - factory.create(config); + factory.create(null, null, config); fail("Exception expected"); } catch 
(ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[properties] illegal property value [" + cityProperty + @@ -150,7 +146,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { config.put("field", "_field"); config.put("database_file", "does-not-exist.mmdb.gz"); try { - factory.create(config); + factory.create(null, null, config); fail("Exception expected"); } catch (ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[database_file] database file [does-not-exist.mmdb.gz] doesn't exist")); @@ -171,7 +167,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { Map config = new HashMap<>(); config.put("field", "_field"); config.put("properties", fieldNames); - GeoIpProcessor processor = factory.create(config); + GeoIpProcessor processor = factory.create(null, null, config); assertThat(processor.getField(), equalTo("_field")); assertThat(processor.getProperties(), equalTo(properties)); } @@ -183,7 +179,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { config.put("field", "_field"); config.put("properties", Collections.singletonList("invalid")); try { - factory.create(config); + factory.create(null, null, config); fail("exception expected"); } catch (ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[properties] illegal property value [invalid]. valid values are [IP, COUNTRY_ISO_CODE, " + @@ -194,7 +190,7 @@ public class GeoIpProcessorFactoryTests extends ESTestCase { config.put("field", "_field"); config.put("properties", "invalid"); try { - factory.create(config); + factory.create(null, null, config); fail("exception expected"); } catch (ElasticsearchParseException e) { assertThat(e.getMessage(), equalTo("[properties] property isn't a list, but of type [java.lang.String]")); diff --git a/core/src/main/java/org/elasticsearch/common/io/CharSequenceReader.java b/plugins/ingest-user-agent/build.gradle similarity index 74% rename from core/src/main/java/org/elasticsearch/common/io/CharSequenceReader.java rename to plugins/ingest-user-agent/build.gradle index cb6c3bc6af1..ec599874d15 100644 --- a/core/src/main/java/org/elasticsearch/common/io/CharSequenceReader.java +++ b/plugins/ingest-user-agent/build.gradle @@ -16,13 +16,14 @@ * specific language governing permissions and limitations * under the License. */ - -package org.elasticsearch.common.io; - -import java.io.Reader; - -/** - * - */ -public abstract class CharSequenceReader extends Reader implements CharSequence { + +esplugin { + description 'Ingest processor that extracts information from a user agent' + classname 'org.elasticsearch.ingest.useragent.IngestUserAgentPlugin' } + +integTest { + cluster { + extraConfigFile 'ingest-user-agent/test-regexes.yaml', 'test/test-regexes.yaml' + } +} \ No newline at end of file diff --git a/plugins/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/IngestUserAgentPlugin.java b/plugins/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/IngestUserAgentPlugin.java new file mode 100644 index 00000000000..ce82d6e1c4d --- /dev/null +++ b/plugins/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/IngestUserAgentPlugin.java @@ -0,0 +1,89 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
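The attachment and geoip plugins above, and the new user-agent plugin that follows, all register processors through the new IngestPlugin extension point instead of onModule(NodeModule). A minimal compilable sketch of the pattern (it returns an empty map rather than a real factory):

```java
import java.util.Collections;
import java.util.Map;

import org.elasticsearch.ingest.Processor;
import org.elasticsearch.plugins.IngestPlugin;
import org.elasticsearch.plugins.Plugin;

public class MinimalIngestPlugin extends Plugin implements IngestPlugin {

    @Override
    public Map<String, Processor.Factory> getProcessors(Processor.Parameters parameters) {
        // A real plugin returns singletonMap(TYPE, factory); parameters.env exposes the
        // node environment, which the geoip plugin uses above to resolve its config directory.
        return Collections.emptyMap();
    }
}
```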
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.useragent; + +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.ingest.Processor; +import org.elasticsearch.plugins.IngestPlugin; +import org.elasticsearch.plugins.Plugin; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.PathMatcher; +import java.nio.file.StandardOpenOption; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.stream.Stream; + +public class IngestUserAgentPlugin extends Plugin implements IngestPlugin { + + private final Setting CACHE_SIZE_SETTING = Setting.longSetting("ingest.user_agent.cache_size", 1000, 0, + Setting.Property.NodeScope); + + static final String DEFAULT_PARSER_NAME = "_default_"; + + @Override + public Map getProcessors(Processor.Parameters parameters) { + Path userAgentConfigDirectory = parameters.env.configFile().resolve("ingest-user-agent"); + + if (Files.exists(userAgentConfigDirectory) == false && Files.isDirectory(userAgentConfigDirectory)) { + throw new IllegalStateException( + "the user agent directory [" + userAgentConfigDirectory + "] containing the regex file doesn't exist"); + } + + long cacheSize = CACHE_SIZE_SETTING.get(parameters.env.settings()); + Map userAgentParsers; + try { + userAgentParsers = createUserAgentParsers(userAgentConfigDirectory, new UserAgentCache(cacheSize)); + } catch (IOException e) { + throw new RuntimeException(e); + } + return Collections.singletonMap(UserAgentProcessor.TYPE, new UserAgentProcessor.Factory(userAgentParsers)); + } + + static Map createUserAgentParsers(Path userAgentConfigDirectory, UserAgentCache cache) throws IOException { + Map userAgentParsers = new HashMap<>(); + + UserAgentParser defaultParser = new UserAgentParser(DEFAULT_PARSER_NAME, + IngestUserAgentPlugin.class.getResourceAsStream("/regexes.yaml"), cache); + userAgentParsers.put(DEFAULT_PARSER_NAME, defaultParser); + + if (Files.exists(userAgentConfigDirectory) && Files.isDirectory(userAgentConfigDirectory)) { + PathMatcher pathMatcher = userAgentConfigDirectory.getFileSystem().getPathMatcher("glob:**.yaml"); + + try (Stream regexFiles = Files.find(userAgentConfigDirectory, 1, + (path, attr) -> attr.isRegularFile() && pathMatcher.matches(path))) { + Iterable iterable = regexFiles::iterator; + for (Path path : iterable) { + String parserName = path.getFileName().toString(); + try (InputStream regexStream = Files.newInputStream(path, StandardOpenOption.READ)) { + userAgentParsers.put(parserName, new UserAgentParser(parserName, regexStream, cache)); + } + } + } + } + + return Collections.unmodifiableMap(userAgentParsers); + } + +} diff --git a/plugins/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/UserAgentCache.java b/plugins/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/UserAgentCache.java new file mode 100644 index 
00000000000..d1002f2df06 --- /dev/null +++ b/plugins/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/UserAgentCache.java @@ -0,0 +1,66 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.useragent; + +import org.elasticsearch.common.cache.Cache; +import org.elasticsearch.common.cache.CacheBuilder; +import org.elasticsearch.ingest.useragent.UserAgentParser.Details; + +import java.util.Objects; + +class UserAgentCache { + private final Cache cache; + + UserAgentCache(long cacheSize) { + cache = CacheBuilder.builder().setMaximumWeight(cacheSize).build(); + } + + public Details get(String parserName, String userAgent) { + return cache.get(new CompositeCacheKey(parserName, userAgent)); + } + + public void put(String parserName, String userAgent, Details details) { + cache.put(new CompositeCacheKey(parserName, userAgent), details); + } + + private static final class CompositeCacheKey { + private final String parserName; + private final String userAgent; + + CompositeCacheKey(String parserName, String userAgent) { + this.parserName = parserName; + this.userAgent = userAgent; + } + + @Override + public boolean equals(Object obj) { + if(obj != null && obj instanceof CompositeCacheKey) { + CompositeCacheKey s = (CompositeCacheKey)obj; + return parserName.equals(s.parserName) && userAgent.equals(s.userAgent); + } + return false; + } + + @Override + public int hashCode() { + return Objects.hash(parserName, userAgent); + } + } +} diff --git a/plugins/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/UserAgentParser.java b/plugins/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/UserAgentParser.java new file mode 100644 index 00000000000..af764d5baf2 --- /dev/null +++ b/plugins/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/UserAgentParser.java @@ -0,0 +1,280 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
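UserAgentCache above keys one bounded cache by (parser name, user agent) through a small value class with proper equals/hashCode, which avoids the delimiter ambiguity of concatenated string keys. A hedged sketch of the underlying cache construction, assuming the builder's default weigher counts each entry as 1:

```java
import org.elasticsearch.common.cache.Cache;
import org.elasticsearch.common.cache.CacheBuilder;

public class CacheSketch {
    public static void main(String[] args) {
        // Mirrors the UserAgentCache constructor: with no custom weigher each entry
        // weighs 1, so setMaximumWeight(1000) bounds the cache at 1000 entries.
        Cache<String, String> cache = CacheBuilder.<String, String>builder()
                .setMaximumWeight(1000)
                .build();
        cache.put("_default_\u0000Mozilla/5.0", "parsed-details"); // crude composite key, for demo only
        System.out.println(cache.get("_default_\u0000Mozilla/5.0"));
    }
}
```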
+ */ + +package org.elasticsearch.ingest.useragent; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import java.io.IOException; +import java.io.InputStream; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +final class UserAgentParser { + + private final UserAgentCache cache; + private final List uaPatterns = new ArrayList<>(); + private final List osPatterns = new ArrayList<>(); + private final List devicePatterns = new ArrayList<>(); + private final String name; + + public UserAgentParser(String name, InputStream regexStream, UserAgentCache cache) { + this.name = name; + this.cache = cache; + + try { + init(regexStream); + } catch (IOException e) { + throw new ElasticsearchParseException("error parsing regular expression file", e); + } + } + + private void init(InputStream regexStream) throws IOException { + XContentParser yamlParser = XContentFactory.xContent(XContentType.YAML).createParser(regexStream); + + XContentParser.Token token = yamlParser.nextToken(); + + if (token == XContentParser.Token.START_OBJECT) { + token = yamlParser.nextToken(); + + for (; token != null; token = yamlParser.nextToken()) { + if (token == XContentParser.Token.FIELD_NAME && yamlParser.currentName().equals("user_agent_parsers")) { + List> parserConfigurations = readParserConfigurations(yamlParser); + + for (Map map : parserConfigurations) { + uaPatterns.add(new UserAgentSubpattern(compilePattern(map.get("regex"), map.get("regex_flag")), + map.get("family_replacement"), map.get("v1_replacement"), map.get("v2_replacement"), + map.get("v3_replacement"), map.get("v4_replacement"))); + } + } + else if (token == XContentParser.Token.FIELD_NAME && yamlParser.currentName().equals("os_parsers")) { + List> parserConfigurations = readParserConfigurations(yamlParser); + + for (Map map : parserConfigurations) { + osPatterns.add(new UserAgentSubpattern(compilePattern(map.get("regex"), map.get("regex_flag")), + map.get("os_replacement"), map.get("os_v1_replacement"), map.get("os_v2_replacement"), + map.get("os_v3_replacement"), map.get("os_v4_replacement"))); + } + } + else if (token == XContentParser.Token.FIELD_NAME && yamlParser.currentName().equals("device_parsers")) { + List> parserConfigurations = readParserConfigurations(yamlParser); + + for (Map map : parserConfigurations) { + devicePatterns.add(new UserAgentSubpattern(compilePattern(map.get("regex"), map.get("regex_flag")), + map.get("device_replacement"), null, null, null, null)); + } + } + } + } + + if (uaPatterns.isEmpty() && osPatterns.isEmpty() && devicePatterns.isEmpty()) { + throw new ElasticsearchParseException("not a valid regular expression file"); + } + } + + private Pattern compilePattern(String regex, String regex_flag) { + // Only flag present in the current default regexes.yaml + if (regex_flag != null && regex_flag.equals("i")) { + return Pattern.compile(regex, Pattern.CASE_INSENSITIVE); + } else { + return Pattern.compile(regex); + } + } + + private List> readParserConfigurations(XContentParser yamlParser) throws IOException { + List > patternList = new ArrayList<>(); + + XContentParser.Token token = yamlParser.nextToken(); + if (token != XContentParser.Token.START_ARRAY) { + throw new ElasticsearchParseException("malformed regular expression 
file, should continue with 'array' after 'object'"); + } + + token = yamlParser.nextToken(); + if (token != XContentParser.Token.START_OBJECT) { + throw new ElasticsearchParseException("malformed regular expression file, expecting 'object'"); + } + + while (token == XContentParser.Token.START_OBJECT) { + token = yamlParser.nextToken(); + + if (token != XContentParser.Token.FIELD_NAME) { + throw new ElasticsearchParseException("malformed regular expression file, should continue with 'field_name' after 'array'"); + } + + Map regexMap = new HashMap<>(); + for (; token == XContentParser.Token.FIELD_NAME; token = yamlParser.nextToken()) { + String fieldName = yamlParser.currentName(); + + token = yamlParser.nextToken(); + String fieldValue = yamlParser.text(); + regexMap.put(fieldName, fieldValue); + } + + patternList.add(regexMap); + + token = yamlParser.nextToken(); + } + + return patternList; + } + + List getUaPatterns() { + return uaPatterns; + } + + List getOsPatterns() { + return osPatterns; + } + + List getDevicePatterns() { + return devicePatterns; + } + + String getName() { + return name; + } + + public Details parse(String agentString) { + Details details = cache.get(name, agentString);; + + if (details == null) { + VersionedName userAgent = findMatch(uaPatterns, agentString); + VersionedName operatingSystem = findMatch(osPatterns, agentString); + VersionedName device = findMatch(devicePatterns, agentString); + + details = new Details(userAgent, operatingSystem, device); + + cache.put(name, agentString, details); + } + + return details; + } + + private VersionedName findMatch(List possiblePatterns, String agentString) { + VersionedName name; + for (UserAgentSubpattern pattern : possiblePatterns) { + name = pattern.match(agentString); + + if (name != null) { + return name; + } + } + + return null; + } + + static final class Details { + public final VersionedName userAgent; + public final VersionedName operatingSystem; + public final VersionedName device; + + public Details(VersionedName userAgent, VersionedName operatingSystem, VersionedName device) { + this.userAgent = userAgent; + this.operatingSystem = operatingSystem; + this.device = device; + } + } + + static final class VersionedName { + public final String name; + public final String major; + public final String minor; + public final String patch; + public final String build; + + public VersionedName(String name, String major, String minor, String patch, String build) { + this.name = name; + this.major = major; + this.minor = minor; + this.patch = patch; + this.build = build; + } + } + + /** + * One of: user agent, operating system, device + */ + static final class UserAgentSubpattern { + private final Pattern pattern; + private final String nameReplacement, v1Replacement, v2Replacement, v3Replacement, v4Replacement; + + public UserAgentSubpattern(Pattern pattern, String nameReplacement, + String v1Replacement, String v2Replacement, String v3Replacement, String v4Replacement) { + this.pattern = pattern; + this.nameReplacement = nameReplacement; + this.v1Replacement = v1Replacement; + this.v2Replacement = v2Replacement; + this.v3Replacement = v3Replacement; + this.v4Replacement = v4Replacement; + } + + public VersionedName match(String agentString) { + String name = null, major = null, minor = null, patch = null, build = null; + Matcher matcher = pattern.matcher(agentString); + + if (!matcher.find()) { + return null; + } + + int groupCount = matcher.groupCount(); + + if (nameReplacement != null) { + if 
diff --git a/plugins/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/UserAgentProcessor.java b/plugins/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/UserAgentProcessor.java new file mode 100644 index 00000000000..ec18126457b --- /dev/null +++ b/plugins/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/UserAgentProcessor.java @@ -0,0 +1,242 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.
+ */ + +package org.elasticsearch.ingest.useragent; + +import org.elasticsearch.ingest.AbstractProcessor; +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.useragent.UserAgentParser.Details; +import org.elasticsearch.ingest.useragent.UserAgentParser.VersionedName; + +import java.util.Arrays; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.ingest.ConfigurationUtils.newConfigurationException; +import static org.elasticsearch.ingest.ConfigurationUtils.readOptionalList; +import static org.elasticsearch.ingest.ConfigurationUtils.readStringProperty; + +public class UserAgentProcessor extends AbstractProcessor { + + public static final String TYPE = "user_agent"; + + private final String field; + private final String targetField; + private final Set<Property> properties; + + private final UserAgentParser parser; + + public UserAgentProcessor(String tag, String field, String targetField, UserAgentParser parser, Set<Property> properties) { + super(tag); + this.field = field; + this.targetField = targetField; + this.parser = parser; + this.properties = properties; + } + + @Override + public void execute(IngestDocument ingestDocument) throws Exception { + String userAgent = ingestDocument.getFieldValue(field, String.class); + + Details uaClient = parser.parse(userAgent); + + Map<String, Object> uaDetails = new HashMap<>(); + for (Property property : this.properties) { + switch (property) { + case NAME: + if (uaClient.userAgent != null && uaClient.userAgent.name != null) { + uaDetails.put("name", uaClient.userAgent.name); + } + else { + uaDetails.put("name", "Other"); + } + break; + case MAJOR: + if (uaClient.userAgent != null && uaClient.userAgent.major != null) { + uaDetails.put("major", uaClient.userAgent.major); + } + break; + case MINOR: + if (uaClient.userAgent != null && uaClient.userAgent.minor != null) { + uaDetails.put("minor", uaClient.userAgent.minor); + } + break; + case PATCH: + if (uaClient.userAgent != null && uaClient.userAgent.patch != null) { + uaDetails.put("patch", uaClient.userAgent.patch); + } + break; + case BUILD: + if (uaClient.userAgent != null && uaClient.userAgent.build != null) { + uaDetails.put("build", uaClient.userAgent.build); + } + break; + case OS: + if (uaClient.operatingSystem != null) { + uaDetails.put("os", buildFullOSName(uaClient.operatingSystem)); + } + else { + uaDetails.put("os", "Other"); + } + + break; + case OS_NAME: + if (uaClient.operatingSystem != null && uaClient.operatingSystem.name != null) { + uaDetails.put("os_name", uaClient.operatingSystem.name); + } + else { + uaDetails.put("os_name", "Other"); + } + break; + case OS_MAJOR: + if (uaClient.operatingSystem != null && uaClient.operatingSystem.major != null) { + uaDetails.put("os_major", uaClient.operatingSystem.major); + } + break; + case OS_MINOR: + if (uaClient.operatingSystem != null && uaClient.operatingSystem.minor != null) { + uaDetails.put("os_minor", uaClient.operatingSystem.minor); + } + break; + case DEVICE: + if (uaClient.device != null && uaClient.device.name != null) { + uaDetails.put("device", uaClient.device.name); + } + else { + uaDetails.put("device", "Other"); + } + break; + } + } + + ingestDocument.setFieldValue(targetField, uaDetails); + } + + /** To maintain compatibility with logstash-filter-useragent */ + private String buildFullOSName(VersionedName operatingSystem) { + if (operatingSystem == null || operatingSystem.name == null) { + return null; + } + + StringBuilder sb = new StringBuilder(operatingSystem.name); + + if (operatingSystem.major != null) { + sb.append(" "); + sb.append(operatingSystem.major); + + if (operatingSystem.minor != null) { + sb.append("."); + sb.append(operatingSystem.minor); + + if (operatingSystem.patch != null) { + sb.append("."); + sb.append(operatingSystem.patch); + + if (operatingSystem.build != null) { + sb.append("."); + sb.append(operatingSystem.build); + } + } + } + } + + return sb.toString(); + } + + @Override + public String getType() { + return TYPE; + } + + String getField() { + return field; + } + + String getTargetField() { + return targetField; + } + + Set<Property> getProperties() { + return properties; + } + + UserAgentParser getUaParser() { + return parser; + } + + public static final class Factory implements Processor.Factory { + + private final Map<String, UserAgentParser> userAgentParsers; + + public Factory(Map<String, UserAgentParser> userAgentParsers) { + this.userAgentParsers = userAgentParsers; + } + + @Override + public UserAgentProcessor create(Map<String, Processor.Factory> factories, String processorTag, + Map<String, Object> config) throws Exception { + String field = readStringProperty(TYPE, processorTag, config, "field"); + String targetField = readStringProperty(TYPE, processorTag, config, "target_field", "user_agent"); + String regexFilename = readStringProperty(TYPE, processorTag, config, "regex_file", IngestUserAgentPlugin.DEFAULT_PARSER_NAME); + List<String> propertyNames = readOptionalList(TYPE, processorTag, config, "properties"); + + UserAgentParser parser = userAgentParsers.get(regexFilename); + if (parser == null) { + throw newConfigurationException(TYPE, processorTag, + "regex_file", "regex file [" + regexFilename + "] doesn't exist (has to exist at node startup)"); + } + + final Set<Property> properties; + if (propertyNames != null) { + properties = EnumSet.noneOf(Property.class); + for (String fieldName : propertyNames) { + try { + properties.add(Property.parseProperty(fieldName)); + } catch (IllegalArgumentException e) { + throw newConfigurationException(TYPE, processorTag, "properties", e.getMessage()); + } + } + } else { + properties = EnumSet.allOf(Property.class); + } + + return new UserAgentProcessor(processorTag, field, targetField, parser, properties); + } + } + + enum Property { + + NAME, MAJOR, MINOR, PATCH, OS, OS_NAME, OS_MAJOR, OS_MINOR, DEVICE, BUILD; + + public static Property parseProperty(String propertyName) { + try { + return valueOf(propertyName.toUpperCase(Locale.ROOT)); + } + catch (IllegalArgumentException e) { + throw new IllegalArgumentException("illegal property value [" + propertyName + "]. valid values are " + + Arrays.toString(EnumSet.allOf(Property.class).toArray())); + } + } + } +}
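In the same spirit, a sketch of how this factory is driven. Illustrative only: the config keys "field", "target_field", "regex_file" and "properties" are the ones consumed by create above, and the parser map stands in for whatever the plugin registers at node startup:

    // Hypothetical wiring, for illustration.
    Map<String, UserAgentParser> userAgentParsers = new HashMap<>();
    userAgentParsers.put(IngestUserAgentPlugin.DEFAULT_PARSER_NAME, parser); // 'parser' as in the sketch above
    UserAgentProcessor.Factory factory = new UserAgentProcessor.Factory(userAgentParsers);

    Map<String, Object> config = new HashMap<>();
    config.put("field", "agent");                                     // field holding the raw UA string
    config.put("properties", Arrays.asList("name", "os", "device")); // a subset of the Property enum
    UserAgentProcessor processor = factory.create(Collections.emptyMap(), "tag1", config); // declares 'throws Exception'
    // processor.execute(ingestDocument) then stores the selected values as a map
    // under the default target field "user_agent".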
diff --git a/plugins/ingest-user-agent/src/main/resources/regexes.yaml b/plugins/ingest-user-agent/src/main/resources/regexes.yaml new file mode 100644 index 00000000000..cc28842308c --- /dev/null +++ b/plugins/ingest-user-agent/src/main/resources/regexes.yaml @@ -0,0 +1,4815 @@ +# Apache License, Version 2.0 +# =========================== +# +# Copyright 2009 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and +# limitations under the License. + +user_agent_parsers: + #### SPECIAL CASES TOP #### + + # @note: iOS / OSX Applications + - regex: '(CFNetwork)(?:/(\d+)\.(\d+)\.?(\d+)?)?' + family_replacement: 'CFNetwork' + + # Pingdom + - regex: '(Pingdom.com_bot_version_)(\d+)\.(\d+)' + family_replacement: 'PingdomBot' + + # Facebook + - regex: '(facebookexternalhit)/(\d+)\.(\d+)' + family_replacement: 'FacebookBot' + + # Google Plus + - regex: 'Google.*/\+/web/snippet' + family_replacement: 'GooglePlusBot' + + # Twitter + - regex: '(Twitterbot)/(\d+)\.(\d+)' + family_replacement: 'TwitterBot' + + # Bots Pattern '/name-0.0' + - regex: '/((?:Ant-)?Nutch|[A-z]+[Bb]ot|[A-z]+[Ss]pider|Axtaris|fetchurl|Isara|ShopSalad|Tailsweep)[ \-](\d+)(?:\.(\d+)(?:\.(\d+))?)?' + # Bots Pattern 'name/0.0' + - regex: '(008|Altresium|Argus|BaiduMobaider|BoardReader|DNSGroup|DataparkSearch|EDI|Goodzer|Grub|INGRID|Infohelfer|LinkedInBot|LOOQ|Nutch|PathDefender|Peew|PostPost|Steeler|Twitterbot|VSE|WebCrunch|WebZIP|Y!J-BR[A-Z]|YahooSeeker|envolk|sproose|wminer)/(\d+)(?:\.(\d+)(?:\.(\d+))?)?' + + # MSIECrawler + - regex: '(MSIE) (\d+)\.(\d+)([a-z]\d?)?;.* MSIECrawler' + family_replacement: 'MSIECrawler' + + # Downloader ... + - regex: '(Google-HTTP-Java-Client|Apache-HttpClient|http%20client|Python-urllib|HttpMonitor|TLSProber|WinHTTP|JNLP)(?:[ /](\d+)(?:\.(\d+)(?:\.(\d+))?)?)?' + + # Bots + - regex: '(1470\.net crawler|50\.nu|8bo Crawler Bot|Aboundex|Accoona-[A-z]+-Agent|AdsBot-Google(?:-[a-z]+)?|altavista|AppEngine-Google|archive.*?\.org_bot|archiver|Ask Jeeves|[Bb]ai[Dd]u[Ss]pider(?:-[A-Za-z]+)*|bingbot|BingPreview|blitzbot|BlogBridge|BoardReader(?: [A-Za-z]+)*|boitho.com-dc|BotSeer|\b\w*favicon\w*\b|\bYeti(?:-[a-z]+)?|Catchpoint bot|[Cc]harlotte|Checklinks|clumboot|Comodo HTTP\(S\) Crawler|Comodo-Webinspector-Crawler|ConveraCrawler|CRAWL-E|CrawlConvera|Daumoa(?:-feedfetcher)?|Feed Seeker Bot|findlinks|Flamingo_SearchEngine|FollowSite Bot|furlbot|Genieo|gigabot|GomezAgent|gonzo1|(?:[a-zA-Z]+-)?Googlebot(?:-[a-zA-Z]+)?|Google SketchUp|grub-client|gsa-crawler|heritrix|HiddenMarket|holmes|HooWWWer|htdig|ia_archiver|ICC-Crawler|Icarus6j|ichiro(?:/mobile)?|IconSurf|IlTrovatore(?:-Setaccio)?|InfuzApp|Innovazion Crawler|InternetArchive|IP2[a-z]+Bot|jbot\b|KaloogaBot|Kraken|Kurzor|larbin|LEIA|LesnikBot|Linguee Bot|LinkAider|LinkedInBot|Lite Bot|Llaut|lycos|Mail\.RU_Bot|masidani_bot|Mediapartners-Google|Microsoft .*? Bot|mogimogi|mozDex|MJ12bot|msnbot(?:-media *)?|msrbot|netresearch|Netvibes|NewsGator[^/]*|^NING|Nutch[^/]*|Nymesis|ObjectsSearch|Orbiter|OOZBOT|PagePeeker|PagesInventory|PaxleFramework|Peeplo Screenshot Bot|PlantyNet_WebRobot|Pompos|Read%20Later|Reaper|RedCarpet|Retreiver|Riddler|Rival IQ|scooter|Scrapy|Scrubby|searchsight|seekbot|semanticdiscovery|Simpy|SimplePie|SEOstats|SimpleRSS|SiteCon|Slurp|snappy|Speedy Spider|Squrl Java|TheUsefulbot|ThumbShotsBot|Thumbshots\.ru|TwitterBot|URL2PNG|Vagabondo|VoilaBot|^vortex|Votay bot|^voyager|WASALive.Bot|Web-sniffer|WebThumb|WeSEE:[A-z]+|WhatWeb|WIRE|WordPress|Wotbox|www\.almaden\.ibm\.com|Xenu(?:.s)? Link Sleuth|Xerka [A-z]+Bot|yacy(?:bot)?|Yahoo[a-z]*Seeker|Yahoo! Slurp|Yandex\w+|YodaoBot(?:-[A-z]+)?|YottaaMonitor|Yowedo|^Zao|^Zao-Crawler|ZeBot_www\.ze\.bz|ZooShot|ZyBorg)(?:[ /]v?(\d+)(?:\.(\d+)(?:\.(\d+))?)?)?' + + # Bots General matcher 'name/0.0' + - regex: '(?:\/[A-Za-z0-9\.]+)? 
*([A-Za-z0-9 \-_\!\[\]:]*(?:[Aa]rchiver|[Ii]ndexer|[Ss]craper|[Bb]ot|[Ss]pider|[Cc]rawl[a-z]*))/(\d+)(?:\.(\d+)(?:\.(\d+))?)?' + # Bots General matcher 'name 0.0' + - regex: '(?:\/[A-Za-z0-9\.]+)? *([A-Za-z0-9 _\!\[\]:]*(?:[Aa]rchiver|[Ii]ndexer|[Ss]craper|[Bb]ot|[Ss]pider|[Cc]rawl[a-z]*)) (\d+)(?:\.(\d+)(?:\.(\d+))?)?' + # Bots containing spider|scrape|bot(but not CUBOT)|Crawl + - regex: '((?:[A-z0-9]+|[A-z\-]+ ?)?(?: the )?(?:[Ss][Pp][Ii][Dd][Ee][Rr]|[Ss]crape|[A-Za-z0-9-]*(?:[^C][^Uu])[Bb]ot|[Cc][Rr][Aa][Ww][Ll])[A-z0-9]*)(?:(?:[ /]| v)(\d+)(?:\.(\d+)(?:\.(\d+))?)?)?' + + # HbbTV standard defines what features the browser should understand. + # but it's like targeting "HTML5 browsers", effective browser support depends on the model + # See os_parsers if you want to target a specific TV + - regex: '(HbbTV)/(\d+)\.(\d+)\.(\d+) \(' + + # must go before Firefox to catch Chimera/SeaMonkey/Camino + - regex: '(Chimera|SeaMonkey|Camino)/(\d+)\.(\d+)\.?([ab]?\d+[a-z]*)?' + + # Social Networks + # Facebook + - regex: '\[FB.*;(FBAV)/(\d+)(?:\.(\d+)(?:\.(\d)+)?)?' + family_replacement: 'Facebook' + # Pinterest + - regex: '\[(Pinterest)/[^\]]+\]' + - regex: '(Pinterest)(?: for Android(?: Tablet)?)?/(\d+)(?:\.(\d+)(?:\.(\d)+)?)?' + + # Firefox + - regex: '(Pale[Mm]oon)/(\d+)\.(\d+)\.?(\d+)?' + family_replacement: 'Pale Moon (Firefox Variant)' + - regex: '(Fennec)/(\d+)\.(\d+)\.?([ab]?\d+[a-z]*)' + family_replacement: 'Firefox Mobile' + - regex: '(Fennec)/(\d+)\.(\d+)(pre)' + family_replacement: 'Firefox Mobile' + - regex: '(Fennec)/(\d+)\.(\d+)' + family_replacement: 'Firefox Mobile' + - regex: '(?:Mobile|Tablet);.*(Firefox)/(\d+)\.(\d+)' + family_replacement: 'Firefox Mobile' + - regex: '(Namoroka|Shiretoko|Minefield)/(\d+)\.(\d+)\.(\d+(?:pre)?)' + family_replacement: 'Firefox ($1)' + - regex: '(Firefox)/(\d+)\.(\d+)(a\d+[a-z]*)' + family_replacement: 'Firefox Alpha' + - regex: '(Firefox)/(\d+)\.(\d+)(b\d+[a-z]*)' + family_replacement: 'Firefox Beta' + - regex: '(Firefox)-(?:\d+\.\d+)?/(\d+)\.(\d+)(a\d+[a-z]*)' + family_replacement: 'Firefox Alpha' + - regex: '(Firefox)-(?:\d+\.\d+)?/(\d+)\.(\d+)(b\d+[a-z]*)' + family_replacement: 'Firefox Beta' + - regex: '(Namoroka|Shiretoko|Minefield)/(\d+)\.(\d+)([ab]\d+[a-z]*)?' + family_replacement: 'Firefox ($1)' + - regex: '(Firefox).*Tablet browser (\d+)\.(\d+)\.(\d+)' + family_replacement: 'MicroB' + - regex: '(MozillaDeveloperPreview)/(\d+)\.(\d+)([ab]\d+[a-z]*)?' + - regex: '(FxiOS)/(\d+)\.(\d+)(\.(\d+))?(\.(\d+))?' + family_replacement: 'Firefox iOS' + + # e.g.: Flock/2.0b2 + - regex: '(Flock)/(\d+)\.(\d+)(b\d+?)' + + # RockMelt + - regex: '(RockMelt)/(\d+)\.(\d+)\.(\d+)' + + # e.g.: Fennec/0.9pre + - regex: '(Navigator)/(\d+)\.(\d+)\.(\d+)' + family_replacement: 'Netscape' + + - regex: '(Navigator)/(\d+)\.(\d+)([ab]\d+)' + family_replacement: 'Netscape' + + - regex: '(Netscape6)/(\d+)\.(\d+)\.?([ab]?\d+)?' + family_replacement: 'Netscape' + + - regex: '(MyIBrow)/(\d+)\.(\d+)' + family_replacement: 'My Internet Browser' + + # Opera will stop at 9.80 and hide the real version in the Version string. + # see: http://dev.opera.com/articles/view/opera-ua-string-changes/ + - regex: '(Opera Tablet).*Version/(\d+)\.(\d+)(?:\.(\d+))?' + - regex: '(Opera Mini)(?:/att)?/?(\d+)?(?:\.(\d+))?(?:\.(\d+))?' 
+ - regex: '(Opera)/.+Opera Mobi.+Version/(\d+)\.(\d+)' + family_replacement: 'Opera Mobile' + - regex: '(Opera)/(\d+)\.(\d+).+Opera Mobi' + family_replacement: 'Opera Mobile' + - regex: 'Opera Mobi.+(Opera)(?:/|\s+)(\d+)\.(\d+)' + family_replacement: 'Opera Mobile' + - regex: 'Opera Mobi' + family_replacement: 'Opera Mobile' + - regex: '(Opera)/9.80.*Version/(\d+)\.(\d+)(?:\.(\d+))?' + + # Opera 14 for Android uses a WebKit render engine. + - regex: '(?:Mobile Safari).*(OPR)/(\d+)\.(\d+)\.(\d+)' + family_replacement: 'Opera Mobile' + + # Opera >=15 for Desktop is similar to Chrome but includes an "OPR" Version string. + - regex: '(?:Chrome).*(OPR)/(\d+)\.(\d+)\.(\d+)' + family_replacement: 'Opera' + + # Opera Coast + - regex: '(Coast)/(\d+).(\d+).(\d+)' + family_replacement: 'Opera Coast' + + # Opera Mini for iOS (from version 8.0.0) + - regex: '(OPiOS)/(\d+).(\d+).(\d+)' + family_replacement: 'Opera Mini' + + # Palm WebOS looks a lot like Safari. + - regex: '(hpw|web)OS/(\d+)\.(\d+)(?:\.(\d+))?' + family_replacement: 'webOS Browser' + + # LuaKit has no version info. + # http://luakit.org/projects/luakit/ + - regex: '(luakit)' + family_replacement: 'LuaKit' + + # Snowshoe + - regex: '(Snowshoe)/(\d+)\.(\d+).(\d+)' + + # Lightning (for Thunderbird) + # http://www.mozilla.org/projects/calendar/lightning/ + - regex: '(Lightning)/(\d+)\.(\d+)\.?((?:[ab]?\d+[a-z]*)|(?:\d*))' + + # Swiftfox + - regex: '(Firefox)/(\d+)\.(\d+)\.(\d+(?:pre)?) \(Swiftfox\)' + family_replacement: 'Swiftfox' + - regex: '(Firefox)/(\d+)\.(\d+)([ab]\d+[a-z]*)? \(Swiftfox\)' + family_replacement: 'Swiftfox' + + # Rekonq + - regex: '(rekonq)/(\d+)\.(\d+)\.?(\d+)? Safari' + family_replacement: 'Rekonq' + - regex: 'rekonq' + family_replacement: 'Rekonq' + + # Conkeror lowercase/uppercase + # http://conkeror.org/ + - regex: '(conkeror|Conkeror)/(\d+)\.(\d+)\.?(\d+)?' + family_replacement: 'Conkeror' + + # catches lower case konqueror + - regex: '(konqueror)/(\d+)\.(\d+)\.(\d+)' + family_replacement: 'Konqueror' + + - regex: '(WeTab)-Browser' + + - regex: '(Comodo_Dragon)/(\d+)\.(\d+)\.(\d+)' + family_replacement: 'Comodo Dragon' + + - regex: '(Symphony) (\d+).(\d+)' + + - regex: '(Minimo)' + + - regex: 'PLAYSTATION 3.+WebKit' + family_replacement: 'NetFront NX' + - regex: 'PLAYSTATION 3' + family_replacement: 'NetFront' + - regex: '(PlayStation Portable)' + family_replacement: 'NetFront' + - regex: '(PlayStation Vita)' + family_replacement: 'NetFront NX' + + - regex: 'AppleWebKit.+ (NX)/(\d+)\.(\d+)\.(\d+)' + family_replacement: 'NetFront NX' + - regex: '(Nintendo 3DS)' + family_replacement: 'NetFront NX' + + # Amazon Silk, should go before Safari and Chrome Mobile + - regex: '(Silk)/(\d+)\.(\d+)(?:\.([0-9\-]+))?' + family_replacement: 'Amazon Silk' + + + # @ref: http://www.puffinbrowser.com + - regex: '(Puffin)/(\d+)\.(\d+)(?:\.(\d+))?' + + # Edge Mobile + - regex: 'Windows Phone .*(Edge)/(\d+)\.(\d+)' + family_replacement: 'Edge Mobile' + + # Samsung Internet (based on Chrome, but lacking some features) + - regex: '(SamsungBrowser)/(\d+)\.(\d+)' + family_replacement: 'Samsung Internet' + + # Chrome Mobile + - regex: '(CrMo)/(\d+)\.(\d+)\.(\d+)\.(\d+)' + family_replacement: 'Chrome Mobile' + - regex: '(CriOS)/(\d+)\.(\d+)\.(\d+)\.(\d+)' + family_replacement: 'Chrome Mobile iOS' + - regex: '(Chrome)/(\d+)\.(\d+)\.(\d+)\.(\d+) Mobile' + family_replacement: 'Chrome Mobile' + + # Chrome Frame must come before MSIE. 
+ - regex: '(chromeframe)/(\d+)\.(\d+)\.(\d+)' + family_replacement: 'Chrome Frame' + + # UC Browser + - regex: '(UCBrowser)[ /](\d+)\.(\d+)\.(\d+)' + family_replacement: 'UC Browser' + - regex: '(UC Browser)[ /](\d+)\.(\d+)\.(\d+)' + - regex: '(UC Browser|UCBrowser|UCWEB)(\d+)\.(\d+)\.(\d+)' + family_replacement: 'UC Browser' + + # Tizen Browser (second case included in browser/major.minor regex) + - regex: '(SLP Browser)/(\d+)\.(\d+)' + family_replacement: 'Tizen Browser' + + # Sogou Explorer 2.X + - regex: '(SE 2\.X) MetaSr (\d+)\.(\d+)' + family_replacement: 'Sogou Explorer' + + # Baidu Browsers (desktop spoofs chrome & IE, explorer is mobile) + - regex: '(baidubrowser)[/\s](\d+)' + family_replacement: 'Baidu Browser' + - regex: '(FlyFlow)/(\d+)\.(\d+)' + family_replacement: 'Baidu Explorer' + + # QQ Browsers + - regex: '(MQQBrowser/Mini)(?:(\d+)(?:\.(\d+)(?:\.(\d+))?)?)?' + family_replacement: 'QQ Browser Mini' + - regex: '(MQQBrowser)(?:/(\d+)(?:\.(\d+)(?:\.(\d+))?)?)?' + family_replacement: 'QQ Browser Mobile' + - regex: '(QQBrowser)(?:/(\d+)(?:\.(\d+)\.(\d+)(?:\.(\d+))?)?)?' + family_replacement: 'QQ Browser' + + # Rackspace Monitoring + - regex: '(Rackspace Monitoring)/(\d+)\.(\d+)' + family_replacement: 'RackspaceBot' + + # PyAMF + - regex: '(PyAMF)/(\d+)\.(\d+)\.(\d+)' + + # Yandex Browser + - regex: '(YaBrowser)/(\d+)\.(\d+)\.(\d+)' + family_replacement: 'Yandex Browser' + + # Mail.ru Amigo/Internet Browser (Chromium-based) + - regex: '(Chrome)/(\d+)\.(\d+)\.(\d+).* MRCHROME' + family_replacement: 'Mail.ru Chromium Browser' + + # AOL Browser (IE-based) + - regex: '(AOL) (\d+)\.(\d+); AOLBuild (\d+)' + + #### END SPECIAL CASES TOP #### + + #### MAIN CASES - this catches > 50% of all browsers #### + + # Browser/major_version.minor_version.beta_version + - regex: '(AdobeAIR|FireWeb|Jasmine|ANTGalio|Midori|Fresco|Lobo|PaleMoon|Maxthon|Lynx|OmniWeb|Dillo|Camino|Demeter|Fluid|Fennec|Epiphany|Shiira|Sunrise|Spotify|Flock|Netscape|Lunascape|WebPilot|NetFront|Netfront|Konqueror|SeaMonkey|Kazehakase|Vienna|Iceape|Iceweasel|IceWeasel|Iron|K-Meleon|Sleipnir|Galeon|GranParadiso|Opera Mini|iCab|NetNewsWire|ThunderBrowse|Iris|UP\.Browser|Bunjalloo|Google Earth|Raven for Mac|Openwave)/(\d+)\.(\d+)\.(\d+)' + + # Outlook 2007 + - regex: 'Microsoft Office Outlook 12\.\d+\.\d+|MSOffice 12' + family_replacement: 'Outlook' + v1_replacement: '2007' + + # Outlook 2010 + - regex: 'Microsoft Outlook 14\.\d+\.\d+|MSOffice 14' + family_replacement: 'Outlook' + v1_replacement: '2010' + + # Outlook 2013 + - regex: 'Microsoft Outlook 15\.\d+\.\d+' + family_replacement: 'Outlook' + v1_replacement: '2013' + + # Outlook 2016 + - regex: 'Microsoft Outlook (?:Mail )?16\.\d+\.\d+' + family_replacement: 'Outlook' + v1_replacement: '2016' + + # Windows Live Mail + - regex: 'Outlook-Express\/7\.0.*' + family_replacement: 'Windows Live Mail' + + # Apple Air Mail + - regex: '(Airmail) (\d+)\.(\d+)(?:\.(\d+))?' + + # Thunderbird + - regex: '(Thunderbird)/(\d+)\.(\d+)\.(\d+(?:pre)?)' + family_replacement: 'Thunderbird' + + # Vivaldi uses "Vivaldi" + - regex: '(Vivaldi)/(\d+)\.(\d+)\.(\d+)' + + # Edge/major_version.minor_version + - regex: '(Edge)/(\d+)\.(\d+)' + + # Brave Browser https://brave.com/ + - regex: '(brave)/(\d+)\.(\d+)\.(\d+) Chrome' + family_replacement: 'Brave' + + # Chrome/Chromium/major_version.minor_version.beta_version + - regex: '(Chromium|Chrome)/(\d+)\.(\d+)\.(\d+)' + + # Dolphin Browser + # @ref: http://www.dolphin.com + - regex: '\b(Dolphin)(?: |HDCN/|/INT\-)(\d+)\.(\d+)\.?(\d+)?' 
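+ + # Worked example (illustrative): the Outlook entries above carry no capture + # groups, so the replacements supply every value: a UA containing + # 'Microsoft Office Outlook 12.4518.1014' yields family 'Outlook' with major + # version '2007', because family_replacement and v1_replacement take + # precedence over captured groups in UserAgentSubpattern.match.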
+ + # Browser/major_version.minor_version + - regex: '(bingbot|Bolt|Jasmine|IceCat|Skyfire|Midori|Maxthon|Lynx|Arora|IBrowse|Dillo|Camino|Shiira|Fennec|Phoenix|Chrome|Flock|Netscape|Lunascape|Epiphany|WebPilot|Opera Mini|Opera|NetFront|Netfront|Konqueror|Googlebot|SeaMonkey|Kazehakase|Vienna|Iceape|Iceweasel|IceWeasel|Iron|K-Meleon|Sleipnir|Galeon|GranParadiso|iCab|iTunes|MacAppStore|NetNewsWire|Space Bison|Stainless|Orca|Dolfin|BOLT|Minimo|Tizen Browser|Polaris|Abrowser|Planetweb|ICE Browser|mDolphin|qutebrowser|Otter|QupZilla)/(\d+)\.(\d+)\.?(\d+)?' + + # Chrome/Chromium/major_version.minor_version + - regex: '(Chromium|Chrome)/(\d+)\.(\d+)' + + ########## + # IE Mobile needs to happen before Android to catch cases such as: + # Mozilla/5.0 (Mobile; Windows Phone 8.1; Android 4.0; ARM; Trident/7.0; Touch; rv:11.0; IEMobile/11.0; NOKIA; Lumia 920)... + # Mozilla/5.0 (Mobile; Windows Phone 8.1; Android 4.0; ARM; Trident/7.0; Touch; rv:11.0; IEMobile/11.0; NOKIA; Lumia 920; ANZ821)... + # Mozilla/5.0 (Mobile; Windows Phone 8.1; Android 4.0; ARM; Trident/7.0; Touch; rv:11.0; IEMobile/11.0; NOKIA; Lumia 920; Orange)... + # Mozilla/5.0 (Mobile; Windows Phone 8.1; Android 4.0; ARM; Trident/7.0; Touch; rv:11.0; IEMobile/11.0; NOKIA; Lumia 920; Vodafone)... + ########## + + # IE Mobile + - regex: '(IEMobile)[ /](\d+)\.(\d+)' + family_replacement: 'IE Mobile' + + # Browser major_version.minor_version.beta_version (space instead of slash) + - regex: '(iRider|Crazy Browser|SkipStone|iCab|Lunascape|Sleipnir|Maemo Browser) (\d+)\.(\d+)\.(\d+)' + # Browser major_version.minor_version (space instead of slash) + - regex: '(iCab|Lunascape|Opera|Android|Jasmine|Polaris) (\d+)\.(\d+)\.?(\d+)?' + + # Kindle WebKit + - regex: '(Kindle)/(\d+)\.(\d+)' + + # weird android UAs + - regex: '(Android) Donut' + v1_replacement: '1' + v2_replacement: '2' + + - regex: '(Android) Eclair' + v1_replacement: '2' + v2_replacement: '1' + + - regex: '(Android) Froyo' + v1_replacement: '2' + v2_replacement: '2' + + - regex: '(Android) Gingerbread' + v1_replacement: '2' + v2_replacement: '3' + + - regex: '(Android) Honeycomb' + v1_replacement: '3' + + # desktop mode + # http://www.anandtech.com/show/3982/windows-phone-7-review + - regex: '(MSIE) (\d+)\.(\d+).*XBLWP7' + family_replacement: 'IE Large Screen' + + #### END MAIN CASES #### + + #### SPECIAL CASES #### + - regex: '(Obigo)InternetBrowser' + - regex: '(Obigo)\-Browser' + - regex: '(Obigo|OBIGO)[^\d]*(\d+)(?:.(\d+))?' + family_replacement: 'Obigo' + + - regex: '(MAXTHON|Maxthon) (\d+)\.(\d+)' + family_replacement: 'Maxthon' + - regex: '(Maxthon|MyIE2|Uzbl|Shiira)' + v1_replacement: '0' + + - regex: '(BrowseX) \((\d+)\.(\d+)\.(\d+)' + + - regex: '(NCSA_Mosaic)/(\d+)\.(\d+)' + family_replacement: 'NCSA Mosaic' + + # Polaris/d.d is above + - regex: '(POLARIS)/(\d+)\.(\d+)' + family_replacement: 'Polaris' + - regex: '(Embider)/(\d+)\.(\d+)' + family_replacement: 'Polaris' + + - regex: '(BonEcho)/(\d+)\.(\d+)\.?([ab]?\d+)?' + family_replacement: 'Bon Echo' + + # @note: iOS / OSX Applications + - regex: '(iPod|iPhone|iPad).+Version/(\d+)\.(\d+)(?:\.(\d+))?.* Safari' + family_replacement: 'Mobile Safari' + - regex: '(iPod|iPhone|iPad).+Version/(\d+)\.(\d+)(?:\.(\d+))?' 
+ family_replacement: 'Mobile Safari UI/WKWebView' + - regex: '(iPod|iPhone|iPad);.*CPU.*OS (\d+)_(\d+)(?:_(\d+))?.*Mobile.* Safari' + family_replacement: 'Mobile Safari' + - regex: '(iPod|iPhone|iPad);.*CPU.*OS (\d+)_(\d+)(?:_(\d+))?.*Mobile' + family_replacement: 'Mobile Safari UI/WKWebView' + - regex: '(iPod|iPhone|iPad).* Safari' + family_replacement: 'Mobile Safari' + - regex: '(iPod|iPhone|iPad)' + family_replacement: 'Mobile Safari UI/WKWebView' + + - regex: '(AvantGo) (\d+).(\d+)' + + - regex: '(OneBrowser)/(\d+).(\d+)' + family_replacement: 'ONE Browser' + + - regex: '(Avant)' + v1_replacement: '1' + + # This is the Tesla Model S (see similar entry in device parsers) + - regex: '(QtCarBrowser)' + v1_replacement: '1' + + - regex: '^(iBrowser/Mini)(\d+).(\d+)' + family_replacement: 'iBrowser Mini' + - regex: '^(iBrowser|iRAPP)/(\d+).(\d+)' + + # nokia browsers + # based on: http://www.developer.nokia.com/Community/Wiki/User-Agent_headers_for_Nokia_devices + - regex: '^(Nokia)' + family_replacement: 'Nokia Services (WAP) Browser' + - regex: '(NokiaBrowser)/(\d+)\.(\d+).(\d+)\.(\d+)' + family_replacement: 'Nokia Browser' + - regex: '(NokiaBrowser)/(\d+)\.(\d+).(\d+)' + family_replacement: 'Nokia Browser' + - regex: '(NokiaBrowser)/(\d+)\.(\d+)' + family_replacement: 'Nokia Browser' + - regex: '(BrowserNG)/(\d+)\.(\d+).(\d+)' + family_replacement: 'Nokia Browser' + - regex: '(Series60)/5\.0' + family_replacement: 'Nokia Browser' + v1_replacement: '7' + v2_replacement: '0' + - regex: '(Series60)/(\d+)\.(\d+)' + family_replacement: 'Nokia OSS Browser' + - regex: '(S40OviBrowser)/(\d+)\.(\d+)\.(\d+)\.(\d+)' + family_replacement: 'Ovi Browser' + - regex: '(Nokia)[EN]?(\d+)' + + # BlackBerry devices + - regex: '(PlayBook).+RIM Tablet OS (\d+)\.(\d+)\.(\d+)' + family_replacement: 'BlackBerry WebKit' + - regex: '(Black[bB]erry|BB10).+Version/(\d+)\.(\d+)\.(\d+)' + family_replacement: 'BlackBerry WebKit' + - regex: '(Black[bB]erry)\s?(\d+)' + family_replacement: 'BlackBerry' + + - regex: '(OmniWeb)/v(\d+)\.(\d+)' + + - regex: '(Blazer)/(\d+)\.(\d+)' + family_replacement: 'Palm Blazer' + + - regex: '(Pre)/(\d+)\.(\d+)' + family_replacement: 'Palm Pre' + + # fork of Links + - regex: '(ELinks)/(\d+)\.(\d+)' + - regex: '(ELinks) \((\d+)\.(\d+)' + - regex: '(Links) \((\d+)\.(\d+)' + + - regex: '(QtWeb) Internet Browser/(\d+)\.(\d+)' + + #- regex: '\(iPad;.+(Version)/(\d+)\.(\d+)(?:\.(\d+))?.*Safari/' + # family_replacement: 'iPad' + + # Phantomjs, should go before Safari + - regex: '(PhantomJS)/(\d+)\.(\d+)\.(\d+)' + + # WebKit Nightly + - regex: '(AppleWebKit)/(\d+)\.?(\d+)?\+ .* Safari' + family_replacement: 'WebKit Nightly' + + # Safari + - regex: '(Version)/(\d+)\.(\d+)(?:\.(\d+))?.*Safari/' + family_replacement: 'Safari' + # Safari didn't provide "Version/d.d.d" prior to 3.0 + - regex: '(Safari)/\d+' + + - regex: '(OLPC)/Update(\d+)\.(\d+)' + + - regex: '(OLPC)/Update()\.(\d+)' + v1_replacement: '0' + + - regex: '(SEMC\-Browser)/(\d+)\.(\d+)' + + - regex: '(Teleca)' + family_replacement: 'Teleca Browser' + + - regex: '(Phantom)/V(\d+)\.(\d+)' + family_replacement: 'Phantom Browser' + + - regex: 'Trident(.*)rv.(\d+)\.(\d+)' + family_replacement: 'IE' + + # Espial + - regex: '(Espial)/(\d+)(?:\.(\d+))?(?:\.(\d+))?' + + # Apple Mail + + # apple mail - not directly detectable, have it after Safari stuff + - regex: '(AppleWebKit)/(\d+)\.(\d+)\.(\d+)' + family_replacement: 'Apple Mail' + + # AFTER THE EDGE CASES ABOVE! 
+ # AFTER IE11 + # BEFORE all other IE + - regex: '(Firefox)/(\d+)\.(\d+)\.(\d+)' + - regex: '(Firefox)/(\d+)\.(\d+)(pre|[ab]\d+[a-z]*)?' + + - regex: '([MS]?IE) (\d+)\.(\d+)' + family_replacement: 'IE' + + - regex: '(python-requests)/(\d+)\.(\d+)' + family_replacement: 'Python Requests' + + - regex: '(Java)[/ ]{0,1}\d+\.(\d+)\.(\d+)[_-]*([a-zA-Z0-9]+)*' + + # Roku Digital-Video-Players https://www.roku.com/ + - regex: '^(Roku)/DVP-(\d+)\.(\d+)' + +os_parsers: + ########## + # HbbTV vendors + ########## + + # starts with the easy one : Panasonic seems consistent across years, hope it will continue + #HbbTV/1.1.1 (;Panasonic;VIERA 2011;f.532;0071-0802 2000-0000;) + #HbbTV/1.1.1 (;Panasonic;VIERA 2012;1.261;0071-3103 2000-0000;) + #HbbTV/1.2.1 (;Panasonic;VIERA 2013;3.672;4101-0003 0002-0000;) + #- regex: 'HbbTV/\d+\.\d+\.\d+ \(;(Panasonic);VIERA ([0-9]{4});' + + # Sony is consistent too but do not place year like the other + # Opera/9.80 (Linux armv7l; HbbTV/1.1.1 (; Sony; KDL32W650A; PKG3.211EUA; 2013;); ) Presto/2.12.362 Version/12.11 + # Opera/9.80 (Linux mips; U; HbbTV/1.1.1 (; Sony; KDL40HX751; PKG1.902EUA; 2012;);; en) Presto/2.10.250 Version/11.60 + # Opera/9.80 (Linux mips; U; HbbTV/1.1.1 (; Sony; KDL22EX320; PKG4.017EUA; 2011;);; en) Presto/2.7.61 Version/11.00 + #- regex: 'HbbTV/\d+\.\d+\.\d+ \(; (Sony);.*;.*; ([0-9]{4});\)' + + + # LG is consistent too, but we need to add manually the year model + #Mozilla/5.0 (Unknown; Linux armv7l) AppleWebKit/537.1+ (KHTML, like Gecko) Safari/537.1+ HbbTV/1.1.1 ( ;LGE ;NetCast 4.0 ;03.20.30 ;1.0M ;) + #Mozilla/5.0 (DirectFB; Linux armv7l) AppleWebKit/534.26+ (KHTML, like Gecko) Version/5.0 Safari/534.26+ HbbTV/1.1.1 ( ;LGE ;NetCast 3.0 ;1.0 ;1.0M ;) + - regex: 'HbbTV/\d+\.\d+\.\d+ \( ;(LG)E ;NetCast 4.0' + os_v1_replacement: '2013' + - regex: 'HbbTV/\d+\.\d+\.\d+ \( ;(LG)E ;NetCast 3.0' + os_v1_replacement: '2012' + + # Samsung is on its way of normalizing their user-agent + # HbbTV/1.1.1 (;Samsung;SmartTV2013;T-FXPDEUC-1102.2;;) WebKit + # HbbTV/1.1.1 (;Samsung;SmartTV2013;T-MST12DEUC-1102.1;;) WebKit + # HbbTV/1.1.1 (;Samsung;SmartTV2012;;;) WebKit + # HbbTV/1.1.1 (;;;;;) Maple_2011 + - regex: 'HbbTV/1.1.1 \(;;;;;\) Maple_2011' + os_replacement: 'Samsung' + os_v1_replacement: '2011' + # manage the two models of 2013 + - regex: 'HbbTV/\d+\.\d+\.\d+ \(;(Samsung);SmartTV([0-9]{4});.*FXPDEUC' + os_v2_replacement: 'UE40F7000' + - regex: 'HbbTV/\d+\.\d+\.\d+ \(;(Samsung);SmartTV([0-9]{4});.*MST12DEUC' + os_v2_replacement: 'UE32F4500' + # generic Samsung (works starting in 2012) + #- regex: 'HbbTV/\d+\.\d+\.\d+ \(;(Samsung);SmartTV([0-9]{4});' + + # Philips : not found any other way than a manual mapping + # Opera/9.80 (Linux mips; U; HbbTV/1.1.1 (; Philips; ; ; ; ) CE-HTML/1.0 NETTV/4.1.3 PHILIPSTV/1.1.1; en) Presto/2.10.250 Version/11.60 + # Opera/9.80 (Linux mips ; U; HbbTV/1.1.1 (; Philips; ; ; ; ) CE-HTML/1.0 NETTV/3.2.1; en) Presto/2.6.33 Version/10.70 + - regex: 'HbbTV/1.1.1 \(; (Philips);.*NETTV/4' + os_v1_replacement: '2013' + - regex: 'HbbTV/1.1.1 \(; (Philips);.*NETTV/3' + os_v1_replacement: '2012' + - regex: 'HbbTV/1.1.1 \(; (Philips);.*NETTV/2' + os_v1_replacement: '2011' + + # the HbbTV emulator developers use HbbTV/1.1.1 (;;;;;) firetv-firefox-plugin 1.1.20 + - regex: 'HbbTV/\d+\.\d+\.\d+.*(firetv)-firefox-plugin (\d+).(\d+).(\d+)' + os_replacement: 'FireHbbTV' + + # generic HbbTV, hoping to catch manufacturer name (always after 2nd comma) and the first string that looks like a 2011-2019 year + - regex: 'HbbTV/\d+\.\d+\.\d+ \(.*; 
?([a-zA-Z]+) ?;.*(201[1-9]).*\)' + + ########## + # @note: Windows Phone needs to come before Windows NT 6.1 *and* before Android to catch cases such as: + # Mozilla/5.0 (Mobile; Windows Phone 8.1; Android 4.0; ARM; Trident/7.0; Touch; rv:11.0; IEMobile/11.0; NOKIA; Lumia 920)... + # Mozilla/5.0 (Mobile; Windows Phone 8.1; Android 4.0; ARM; Trident/7.0; Touch; rv:11.0; IEMobile/11.0; NOKIA; Lumia 920; ANZ821)... + # Mozilla/5.0 (Mobile; Windows Phone 8.1; Android 4.0; ARM; Trident/7.0; Touch; rv:11.0; IEMobile/11.0; NOKIA; Lumia 920; Orange)... + # Mozilla/5.0 (Mobile; Windows Phone 8.1; Android 4.0; ARM; Trident/7.0; Touch; rv:11.0; IEMobile/11.0; NOKIA; Lumia 920; Vodafone)... + ########## + + - regex: '(Windows Phone) (?:OS[ /])?(\d+)\.(\d+)' + + ########## + # Android + # can actually detect rooted android os. do we care? + ########## + - regex: '(Android)[ \-/](\d+)\.(\d+)(?:[.\-]([a-z0-9]+))?' + + - regex: '(Android) Donut' + os_v1_replacement: '1' + os_v2_replacement: '2' + + - regex: '(Android) Eclair' + os_v1_replacement: '2' + os_v2_replacement: '1' + + - regex: '(Android) Froyo' + os_v1_replacement: '2' + os_v2_replacement: '2' + + - regex: '(Android) Gingerbread' + os_v1_replacement: '2' + os_v2_replacement: '3' + + - regex: '(Android) Honeycomb' + os_v1_replacement: '3' + + # UCWEB + - regex: '^UCWEB.*; (Adr) (\d+)\.(\d+)(?:[.\-]([a-z0-9]+))?;' + os_replacement: 'Android' + - regex: '^UCWEB.*; (iPad OS|iPh OS) (\d+)_(\d+)(?:_(\d+))?;' + os_replacement: 'iOS' + - regex: '^UCWEB.*; (wds) (\d+)\.(\d+)(?:\.(\d+))?;' + os_replacement: 'Windows Phone' + # JUC + - regex: '^(JUC).*; ?U; ?(?:Android)?(\d+)\.(\d+)(?:[\.\-]([a-z0-9]+))?' + os_replacement: 'Android' + + ########## + # Kindle Android + ########## + - regex: '(Silk-Accelerated=[a-z]{4,5})' + os_replacement: 'Android' + + ########## + # Windows + # http://en.wikipedia.org/wiki/Windows_NT#Releases + # possibility of false positive when different marketing names share same NT kernel + # e.g. windows server 2003 and windows xp + # lots of ua strings have Windows NT 4.1 !?!?!?!? !?!? !? !????!?! !!! ??? !?!?! ? + # (very) roughly ordered in terms of frequency of occurence of regex (win xp currently most frequent, etc) + ########## + + # ie mobile desktop mode + # spoofs nt 6.1. 
must come before windows 7 + - regex: '(XBLWP7)' + os_replacement: 'Windows Phone' + + # @note: This needs to come before Windows NT 6.1 + - regex: '(Windows ?Mobile)' + os_replacement: 'Windows Mobile' + + - regex: '(Windows (?:NT 5\.2|NT 5\.1))' + os_replacement: 'Windows XP' + + - regex: '(Windows NT 6\.1)' + os_replacement: 'Windows 7' + + - regex: '(Windows NT 6\.0)' + os_replacement: 'Windows Vista' + + - regex: '(Win 9x 4\.90)' + os_replacement: 'Windows ME' + + - regex: '(Windows 98|Windows XP|Windows ME|Windows 95|Windows CE|Windows 7|Windows NT 4\.0|Windows Vista|Windows 2000|Windows 3.1)' + + - regex: '(Windows NT 6\.2; ARM;)' + os_replacement: 'Windows RT' + - regex: '(Windows NT 6\.2)' + os_replacement: 'Windows 8' + + - regex: '(Windows NT 6\.3; ARM;)' + os_replacement: 'Windows RT 8.1' + - regex: '(Windows NT 6\.3)' + os_replacement: 'Windows 8.1' + + - regex: '(Windows NT 6\.4)' + os_replacement: 'Windows 10' + - regex: '(Windows NT 10\.0)' + os_replacement: 'Windows 10' + + - regex: '(Windows NT 5\.0)' + os_replacement: 'Windows 2000' + + - regex: '(WinNT4.0)' + os_replacement: 'Windows NT 4.0' + + - regex: '(Windows ?CE)' + os_replacement: 'Windows CE' + + - regex: 'Win ?(95|98|3.1|NT|ME|2000)' + os_replacement: 'Windows $1' + + - regex: 'Win16' + os_replacement: 'Windows 3.1' + + - regex: 'Win32' + os_replacement: 'Windows 95' + + ########## + # Tizen OS from Samsung + # spoofs Android so pushing it above + ########## + - regex: '(Tizen)/(\d+)\.(\d+)' + + ########## + # Mac OS + # @ref: http://en.wikipedia.org/wiki/Mac_OS_X#Versions + # @ref: http://www.puredarwin.org/curious/versions + ########## + - regex: '((?:Mac ?|; )OS X)[\s/](?:(\d+)[_.](\d+)(?:[_.](\d+))?|Mach-O)' + os_replacement: 'Mac OS X' + # Leopard + - regex: ' (Dar)(win)/(9).(\d+).*\((?:i386|x86_64|Power Macintosh)\)' + os_replacement: 'Mac OS X' + os_v1_replacement: '10' + os_v2_replacement: '5' + # Snow Leopard + - regex: ' (Dar)(win)/(10).(\d+).*\((?:i386|x86_64)\)' + os_replacement: 'Mac OS X' + os_v1_replacement: '10' + os_v2_replacement: '6' + # Lion + - regex: ' (Dar)(win)/(11).(\d+).*\((?:i386|x86_64)\)' + os_replacement: 'Mac OS X' + os_v1_replacement: '10' + os_v2_replacement: '7' + # Mountain Lion + - regex: ' (Dar)(win)/(12).(\d+).*\((?:i386|x86_64)\)' + os_replacement: 'Mac OS X' + os_v1_replacement: '10' + os_v2_replacement: '8' + # Mavericks + - regex: ' (Dar)(win)/(13).(\d+).*\((?:i386|x86_64)\)' + os_replacement: 'Mac OS X' + os_v1_replacement: '10' + os_v2_replacement: '9' + # Yosemite is Darwin/14.x but patch versions are inconsistent in the Darwin string; + # more accurately covered by CFNetwork regexes downstream + + # IE on Mac doesn't specify version number + - regex: 'Mac_PowerPC' + os_replacement: 'Mac OS' + + # builds before tiger don't seem to specify version? + + # ios devices spoof (mac os x), so including intel/ppc prefixes + - regex: '(?:PPC|Intel) (Mac OS X)' + + ########## + # iOS + # http://en.wikipedia.org/wiki/IOS_version_history + ########## + # keep this above generic iOS, since AppleTV UAs contain 'CPU OS' + - regex: '(Apple\s?TV)(?:/(\d+)\.(\d+))?' + os_replacement: 'ATV OS X' + + - regex: '(CPU OS|iPhone OS|CPU iPhone) +(\d+)[_\.](\d+)(?:[_\.](\d+))?' 
+ os_replacement: 'iOS' + + # remaining cases are mostly only opera uas, so catch opera as to not catch iphone spoofs + - regex: '(iPhone|iPad|iPod); Opera' + os_replacement: 'iOS' + + # few more stragglers + - regex: '(iPhone|iPad|iPod).*Mac OS X.*Version/(\d+)\.(\d+)' + os_replacement: 'iOS' + + # CFNetwork/Darwin - The specific CFNetwork or Darwin version determines + # whether the os maps to Mac OS, or iOS, or just Darwin. + # See: http://user-agents.me/cfnetwork-version-list + - regex: '(CFNetwork)/(5)48\.0\.3.* Darwin/11\.0\.0' + os_replacement: 'iOS' + - regex: '(CFNetwork)/(5)48\.(0)\.4.* Darwin/(1)1\.0\.0' + os_replacement: 'iOS' + - regex: '(CFNetwork)/(5)48\.(1)\.4' + os_replacement: 'iOS' + - regex: '(CFNetwork)/(4)85\.1(3)\.9' + os_replacement: 'iOS' + - regex: '(CFNetwork)/(6)09\.(1)\.4' + os_replacement: 'iOS' + - regex: '(CFNetwork)/(6)(0)9' + os_replacement: 'iOS' + - regex: '(CFNetwork)/6(7)2\.(1)\.13' + os_replacement: 'iOS' + - regex: '(CFNetwork)/6(7)2\.(1)\.(1)4' + os_replacement: 'iOS' + - regex: '(CF)(Network)/6(7)(2)\.1\.15' + os_replacement: 'iOS' + os_v1_replacement: '7' + os_v2_replacement: '1' + - regex: '(CFNetwork)/6(7)2\.(0)\.(?:2|8)' + os_replacement: 'iOS' + - regex: '(CFNetwork)/709\.1' + os_replacement: 'iOS' + os_v1_replacement: '8' + os_v2_replacement: '0.b5' + - regex: '(CF)(Network)/711\.(\d)' + os_replacement: 'iOS' + os_v1_replacement: '8' + - regex: '(CF)(Network)/(720)\.(\d)' + os_replacement: 'Mac OS X' + os_v1_replacement: '10' + os_v2_replacement: '10' + - regex: '(CF)(Network)/758\.(\d)' + os_replacement: 'iOS' + os_v1_replacement: '9' + + ########## + # CFNetwork iOS Apps + # @ref: https://en.wikipedia.org/wiki/Darwin_(operating_system)#Release_history + ########## + - regex: 'CFNetwork/.* Darwin/(9)\.\d+' + os_replacement: 'iOS' + os_v1_replacement: '1' + - regex: 'CFNetwork/.* Darwin/(10)\.\d+' + os_replacement: 'iOS' + os_v1_replacement: '4' + - regex: 'CFNetwork/.* Darwin/(11)\.\d+' + os_replacement: 'iOS' + os_v1_replacement: '5' + - regex: 'CFNetwork/.* Darwin/(13)\.\d+' + os_replacement: 'iOS' + os_v1_replacement: '6' + - regex: 'CFNetwork/6.* Darwin/(14)\.\d+' + os_replacement: 'iOS' + os_v1_replacement: '7' + - regex: 'CFNetwork/7.* Darwin/(14)\.\d+' + os_replacement: 'iOS' + os_v1_replacement: '8' + os_v2_replacement: '0' + - regex: 'CFNetwork/7.* Darwin/(15)\.\d+' + os_replacement: 'iOS' + os_v1_replacement: '9' + os_v2_replacement: '0' + # iOS Apps + - regex: '\b(iOS[ /]|iPhone(?:/| v|[ _]OS[/,]|; | OS : |\d,\d/|\d,\d; )|iPad/)(\d{1,2})[_\.](\d{1,2})(?:[_\.](\d+))?' + os_replacement: 'iOS' + + ########## + # Apple TV + ########## + - regex: '(tvOS)/(\d+).(\d+)' + os_replacement: 'tvOS' + + ########## + # Chrome OS + # if version 0.0.0, probably this stuff: + # http://code.google.com/p/chromium-os/issues/detail?id=11573 + # http://code.google.com/p/chromium-os/issues/detail?id=13790 + ########## + - regex: '(CrOS) [a-z0-9_]+ (\d+)\.(\d+)(?:\.(\d+))?' + os_replacement: 'Chrome OS' + + ########## + # Linux distros + ########## + - regex: '([Dd]ebian)' + os_replacement: 'Debian' + - regex: '(Linux Mint)(?:/(\d+))?' + - regex: '(Mandriva)(?: Linux)?/(?:[\d.-]+m[a-z]{2}(\d+).(\d))?' 
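+ + # Worked example (illustrative): an app UA such as + # 'MyApp/1.0 CFNetwork/711.3.18 Darwin/14.0.0' carries no browser or OS token, + # so the CFNetwork/711 rule above maps it to iOS with os_v1_replacement '8'; + # the neighbouring CFNetwork/Darwin fallbacks cover apps that report only + # a kernel version.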
+ + ########## + # Symbian + Symbian OS + # http://en.wikipedia.org/wiki/History_of_Symbian + ########## + - regex: '(Symbian[Oo][Ss])[/ ](\d+)\.(\d+)' + os_replacement: 'Symbian OS' + - regex: '(Symbian/3).+NokiaBrowser/7\.3' + os_replacement: 'Symbian^3 Anna' + - regex: '(Symbian/3).+NokiaBrowser/7\.4' + os_replacement: 'Symbian^3 Belle' + - regex: '(Symbian/3)' + os_replacement: 'Symbian^3' + - regex: '\b(Series 60|SymbOS|S60Version|S60V\d|S60\b)' + os_replacement: 'Symbian OS' + - regex: '(MeeGo)' + - regex: 'Symbian [Oo][Ss]' + os_replacement: 'Symbian OS' + - regex: 'Series40;' + os_replacement: 'Nokia Series 40' + - regex: 'Series30Plus;' + os_replacement: 'Nokia Series 30 Plus' + + ########## + # BlackBerry devices + ########## + - regex: '(BB10);.+Version/(\d+)\.(\d+)\.(\d+)' + os_replacement: 'BlackBerry OS' + - regex: '(Black[Bb]erry)[0-9a-z]+/(\d+)\.(\d+)\.(\d+)(?:\.(\d+))?' + os_replacement: 'BlackBerry OS' + - regex: '(Black[Bb]erry).+Version/(\d+)\.(\d+)\.(\d+)(?:\.(\d+))?' + os_replacement: 'BlackBerry OS' + - regex: '(RIM Tablet OS) (\d+)\.(\d+)\.(\d+)' + os_replacement: 'BlackBerry Tablet OS' + - regex: '(Play[Bb]ook)' + os_replacement: 'BlackBerry Tablet OS' + - regex: '(Black[Bb]erry)' + os_replacement: 'BlackBerry OS' + + ########## + # Firefox OS + ########## + - regex: '\((?:Mobile|Tablet);.+Gecko/18.0 Firefox/\d+\.\d+' + os_replacement: 'Firefox OS' + os_v1_replacement: '1' + os_v2_replacement: '0' + os_v3_replacement: '1' + + - regex: '\((?:Mobile|Tablet);.+Gecko/18.1 Firefox/\d+\.\d+' + os_replacement: 'Firefox OS' + os_v1_replacement: '1' + os_v2_replacement: '1' + + - regex: '\((?:Mobile|Tablet);.+Gecko/26.0 Firefox/\d+\.\d+' + os_replacement: 'Firefox OS' + os_v1_replacement: '1' + os_v2_replacement: '2' + + - regex: '\((?:Mobile|Tablet);.+Gecko/28.0 Firefox/\d+\.\d+' + os_replacement: 'Firefox OS' + os_v1_replacement: '1' + os_v2_replacement: '3' + + - regex: '\((?:Mobile|Tablet);.+Gecko/30.0 Firefox/\d+\.\d+' + os_replacement: 'Firefox OS' + os_v1_replacement: '1' + os_v2_replacement: '4' + + - regex: '\((?:Mobile|Tablet);.+Gecko/32.0 Firefox/\d+\.\d+' + os_replacement: 'Firefox OS' + os_v1_replacement: '2' + os_v2_replacement: '0' + + - regex: '\((?:Mobile|Tablet);.+Gecko/34.0 Firefox/\d+\.\d+' + os_replacement: 'Firefox OS' + os_v1_replacement: '2' + os_v2_replacement: '1' + + # Firefox OS Generic + - regex: '\((?:Mobile|Tablet);.+Firefox/\d+\.\d+' + os_replacement: 'Firefox OS' + + + ########## + # BREW + # yes, Brew is lower-cased for Brew MP + ########## + - regex: '(BREW)[ /](\d+)\.(\d+)\.(\d+)' + - regex: '(BREW);' + - regex: '(Brew MP|BMP)[ /](\d+)\.(\d+)\.(\d+)' + os_replacement: 'Brew MP' + - regex: 'BMP;' + os_replacement: 'Brew MP' + + ########## + # Google TV + ########## + - regex: '(GoogleTV)(?: (\d+)\.(\d+)(?:\.(\d+))?|/[\da-z]+)' + + - regex: '(WebTV)/(\d+).(\d+)' + + ########## + # Misc mobile + ########## + - regex: '(hpw|web)OS/(\d+)\.(\d+)(?:\.(\d+))?' + os_replacement: 'webOS' + - regex: '(VRE);' + + ########## + # Generic patterns + # since the majority of os cases are very specific, these go last + ########## + - regex: '(Fedora|Red Hat|PCLinuxOS|Puppy|Ubuntu|Kindle|Bada|Lubuntu|BackTrack|Slackware|(?:Free|Open|Net|\b)BSD)[/ ](\d+)\.(\d+)(?:\.(\d+)(?:\.(\d+))?)?' 
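+ + # Worked example (illustrative): a UA containing 'Ubuntu/12.04' is resolved + # here only when none of the specific rules above matched; the generic distro + # pattern then yields os 'Ubuntu' 12.04, which is why these catch-alls sit in + # the trailing generic section of os_parsers.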
+ + # Gentoo Linux + Kernel Version + - regex: '(Linux)[ /](\d+)\.(\d+)(?:\.(\d+))?.*gentoo' + os_replacement: 'Gentoo' + + # Opera Mini Bada + - regex: '\((Bada);' + + # just os + - regex: '(Windows|Android|WeTab|Maemo)' + - regex: '(Ubuntu|Kubuntu|Arch Linux|CentOS|Slackware|Gentoo|openSUSE|SUSE|Red Hat|Fedora|PCLinuxOS|Mageia|(?:Free|Open|Net|\b)BSD)' + # Linux + Kernel Version + - regex: '(Linux)(?:[ /](\d+)\.(\d+)(?:\.(\d+))?)?' + - regex: 'SunOS' + os_replacement: 'Solaris' + + # Roku Digital-Video-Players https://www.roku.com/ + - regex: '^(Roku)/DVP-(\d+)\.(\d+)' + +device_parsers: + + ######### + # Mobile Spiders + # Catch the mobile crawler before checking for iPhones / Androids. + ######### + - regex: '(?:(?:iPhone|Windows CE|Android).*(?:(?:Bot|Yeti)-Mobile|YRSpider|bots?/\d|(?:bot|spider)\.html)|AdsBot-Google-Mobile.*iPhone)' + regex_flag: 'i' + device_replacement: 'Spider' + brand_replacement: 'Spider' + model_replacement: 'Smartphone' + - regex: '(?:DoCoMo|\bMOT\b|\bLG\b|Nokia|Samsung|SonyEricsson).*(?:(?:Bot|Yeti)-Mobile|bots?/\d|(?:bot|crawler)\.html|(?:jump|google|Wukong)bot|ichiro/mobile|/spider|YahooSeeker)' + regex_flag: 'i' + device_replacement: 'Spider' + brand_replacement: 'Spider' + model_replacement: 'Feature Phone' + + ######### + # WebBrowser for SmartWatch + # @ref: https://play.google.com/store/apps/details?id=se.vaggan.webbrowser&hl=en + ######### + - regex: '\bSmartWatch *\( *([^;]+) *; *([^;]+) *;' + device_replacement: '$1 $2' + brand_replacement: '$1' + model_replacement: '$2' + + ###################################################################### + # Android parsers + # + # @ref: https://support.google.com/googleplay/answer/1727131?hl=en + ###################################################################### + + # Android Application + - regex: 'Android Application[^\-]+ - (Sony) ?(Ericsson)? 
(.+) \w+ - ' + device_replacement: '$1 $2' + brand_replacement: '$1$2' + model_replacement: '$3' + - regex: 'Android Application[^\-]+ - (?:HTC|HUAWEI|LGE|LENOVO|MEDION|TCT) (HTC|HUAWEI|LG|LENOVO|MEDION|ALCATEL)[ _\-](.+) \w+ - ' + regex_flag: 'i' + device_replacement: '$1 $2' + brand_replacement: '$1' + model_replacement: '$2' + - regex: 'Android Application[^\-]+ - ([^ ]+) (.+) \w+ - ' + device_replacement: '$1 $2' + brand_replacement: '$1' + model_replacement: '$2' + + ######### + # 3Q + # @ref: http://www.3q-int.com/ + ######### + - regex: '; *([BLRQ]C\d{4}[A-Z]+) +Build/' + device_replacement: '3Q $1' + brand_replacement: '3Q' + model_replacement: '$1' + - regex: '; *(?:3Q_)([^;/]+) +Build' + device_replacement: '3Q $1' + brand_replacement: '3Q' + model_replacement: '$1' + + ######### + # Acer + # @ref: http://us.acer.com/ac/en/US/content/group/tablets + ######### + - regex: 'Android [34].*; *(A100|A101|A110|A200|A210|A211|A500|A501|A510|A511|A700(?: Lite| 3G)?|A701|B1-A71|A1-\d{3}|B1-\d{3}|V360|V370|W500|W500P|W501|W501P|W510|W511|W700|Slider SL101|DA22[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Acer' + model_replacement: '$1' + - regex: '; *Acer Iconia Tab ([^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Acer' + model_replacement: '$1' + - regex: '; *(Z1[1235]0|E320[^/]*|S500|S510|Liquid[^;/]*|Iconia A\d+) Build' + device_replacement: '$1' + brand_replacement: 'Acer' + model_replacement: '$1' + - regex: '; *(Acer |ACER )([^;/]+) Build' + device_replacement: '$1$2' + brand_replacement: 'Acer' + model_replacement: '$2' + + ######### + # Advent + # @ref: https://en.wikipedia.org/wiki/Advent_Vega + # @note: VegaBean and VegaComb (names derived from jellybean, honeycomb) are + # custom ROM builds for Vega + ######### + - regex: '; *(Advent )?(Vega(?:Bean|Comb)?).* Build' + device_replacement: '$1$2' + brand_replacement: 'Advent' + model_replacement: '$2' + + ######### + # Ainol + # @ref: http://www.ainol.com/plugin.php?identifier=ainol&module=product + ######### + - regex: '; *(Ainol )?((?:NOVO|[Nn]ovo)[^;/]+) Build' + device_replacement: '$1$2' + brand_replacement: 'Ainol' + model_replacement: '$2' + + ######### + # Airis + # @ref: http://airis.es/Tienda/Default.aspx?idG=001 + ######### + - regex: '; *AIRIS[ _\-]?([^/;\)]+) *(?:;|\)|Build)' + regex_flag: 'i' + device_replacement: '$1' + brand_replacement: 'Airis' + model_replacement: '$1' + - regex: '; *(OnePAD[^;/]+) Build' + regex_flag: 'i' + device_replacement: '$1' + brand_replacement: 'Airis' + model_replacement: '$1' + + ######### + # Airpad + # @ref: ?? + ######### + - regex: '; *Airpad[ \-]([^;/]+) Build' + device_replacement: 'Airpad $1' + brand_replacement: 'Airpad' + model_replacement: '$1' + + ######### + # Alcatel - TCT + # @ref: http://www.alcatelonetouch.com/global-en/products/smartphones.html + ######### + - regex: '; *(one ?touch) (EVO7|T10|T20) Build' + device_replacement: 'Alcatel One Touch $2' + brand_replacement: 'Alcatel' + model_replacement: 'One Touch $2' + - regex: '; *(?:alcatel[ _])?(?:(?:one[ _]?touch[ _])|ot[ \-])([^;/]+);? 
Build' + regex_flag: 'i' + device_replacement: 'Alcatel One Touch $1' + brand_replacement: 'Alcatel' + model_replacement: 'One Touch $1' + - regex: '; *(TCL)[ _]([^;/]+) Build' + device_replacement: '$1 $2' + brand_replacement: '$1' + model_replacement: '$2' + # operator specific models + - regex: '; *(Vodafone Smart II|Optimus_Madrid) Build' + device_replacement: 'Alcatel $1' + brand_replacement: 'Alcatel' + model_replacement: '$1' + - regex: '; *BASE_Lutea_3 Build' + device_replacement: 'Alcatel One Touch 998' + brand_replacement: 'Alcatel' + model_replacement: 'One Touch 998' + - regex: '; *BASE_Varia Build' + device_replacement: 'Alcatel One Touch 918D' + brand_replacement: 'Alcatel' + model_replacement: 'One Touch 918D' + + ######### + # Allfine + # @ref: http://www.myallfine.com/Products.asp + ######### + - regex: '; *((?:FINE|Fine)\d[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Allfine' + model_replacement: '$1' + + ######### + # Allview + # @ref: http://www.allview.ro/produse/droseries/lista-tablete-pc/ + ######### + - regex: '; *(ALLVIEW[ _]?|Allview[ _]?)((?:Speed|SPEED).*) Build/' + device_replacement: '$1$2' + brand_replacement: 'Allview' + model_replacement: '$2' + - regex: '; *(ALLVIEW[ _]?|Allview[ _]?)?(AX1_Shine|AX2_Frenzy) Build' + device_replacement: '$1$2' + brand_replacement: 'Allview' + model_replacement: '$2' + - regex: '; *(ALLVIEW[ _]?|Allview[ _]?)([^;/]*) Build' + device_replacement: '$1$2' + brand_replacement: 'Allview' + model_replacement: '$2' + + ######### + # Allwinner + # @ref: http://www.allwinner.com/ + # @models: A31 (13.3"),A20,A10, + ######### + - regex: '; *(A13-MID) Build' + device_replacement: '$1' + brand_replacement: 'Allwinner' + model_replacement: '$1' + - regex: '; *(Allwinner)[ _\-]?([^;/]+) Build' + device_replacement: '$1 $2' + brand_replacement: 'Allwinner' + model_replacement: '$1' + + ######### + # Amaway + # @ref: http://www.amaway.cn/ + ######### + - regex: '; *(A651|A701B?|A702|A703|A705|A706|A707|A711|A712|A713|A717|A722|A785|A801|A802|A803|A901|A902|A1002|A1003|A1006|A1007|A9701|A9703|Q710|Q80) Build' + device_replacement: '$1' + brand_replacement: 'Amaway' + model_replacement: '$1' + + ######### + # Amoi + # @ref: http://www.amoi.com/en/prd/prd_index.jspx + ######### + - regex: '; *(?:AMOI|Amoi)[ _]([^;/]+) Build' + device_replacement: 'Amoi $1' + brand_replacement: 'Amoi' + model_replacement: '$1' + - regex: '^(?:AMOI|Amoi)[ _]([^;/]+) Linux' + device_replacement: 'Amoi $1' + brand_replacement: 'Amoi' + model_replacement: '$1' + + ######### + # Aoc + # @ref: http://latin.aoc.com/media_tablet + ######### + - regex: '; *(MW(?:0[789]|10)[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Aoc' + model_replacement: '$1' + + ######### + # Aoson + # @ref: http://www.luckystar.com.cn/en/mid.aspx?page=1 + # @ref: http://www.luckystar.com.cn/en/mobiletel.aspx?page=1 + # @note: brand owned by luckystar + ######### + - regex: '; *(G7|M1013|M1015G|M11[CG]?|M-?12[B]?|M15|M19[G]?|M30[ACQ]?|M31[GQ]|M32|M33[GQ]|M36|M37|M38|M701T|M710|M712B|M713|M715G|M716G|M71(?:G|GS|T)?|M72[T]?|M73[T]?|M75[GT]?|M77G|M79T|M7L|M7LN|M81|M810|M81T|M82|M92|M92KS|M92S|M717G|M721|M722G|M723|M725G|M739|M785|M791|M92SK|M93D) Build' + device_replacement: 'Aoson $1' + brand_replacement: 'Aoson' + model_replacement: '$1' + - regex: '; *Aoson ([^;/]+) Build' + regex_flag: 'i' + device_replacement: 'Aoson $1' + brand_replacement: 'Aoson' + model_replacement: '$1' + + ######### + # Apanda + # @ref: http://www.apanda.com.cn/ + ######### + - regex: '; 
*[Aa]panda[ _\-]([^;/]+) Build' + device_replacement: 'Apanda $1' + brand_replacement: 'Apanda' + model_replacement: '$1' + + ######### + # Archos + # @ref: http://www.archos.com/de/products/tablets.html + # @ref: http://www.archos.com/de/products/smartphones/index.html + ######### + - regex: '; *(?:ARCHOS|Archos) ?(GAMEPAD.*?)(?: Build|[;/\(\)\-])' + device_replacement: 'Archos $1' + brand_replacement: 'Archos' + model_replacement: '$1' + - regex: 'ARCHOS; GOGI; ([^;]+);' + device_replacement: 'Archos $1' + brand_replacement: 'Archos' + model_replacement: '$1' + - regex: '(?:ARCHOS|Archos)[ _]?(.*?)(?: Build|[;/\(\)\-]|$)' + device_replacement: 'Archos $1' + brand_replacement: 'Archos' + model_replacement: '$1' + - regex: '; *(AN(?:7|8|9|10|13)[A-Z0-9]{1,4}) Build' + device_replacement: 'Archos $1' + brand_replacement: 'Archos' + model_replacement: '$1' + - regex: '; *(A28|A32|A43|A70(?:BHT|CHT|HB|S|X)|A101(?:B|C|IT)|A7EB|A7EB-WK|101G9|80G9) Build' + device_replacement: 'Archos $1' + brand_replacement: 'Archos' + model_replacement: '$1' + + ######### + # A-rival + # @ref: http://www.a-rival.de/de/ + ######### + - regex: '; *(PAD-FMD[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Arival' + model_replacement: '$1' + - regex: '; *(BioniQ) ?([^;/]+) Build' + device_replacement: '$1 $2' + brand_replacement: 'Arival' + model_replacement: '$1 $2' + + ######### + # Arnova + # @ref: http://arnovatech.com/ + ######### + - regex: '; *(AN\d[^;/]+|ARCHM\d+) Build' + device_replacement: 'Arnova $1' + brand_replacement: 'Arnova' + model_replacement: '$1' + - regex: '; *(?:ARNOVA|Arnova) ?([^;/]+) Build' + device_replacement: 'Arnova $1' + brand_replacement: 'Arnova' + model_replacement: '$1' + + ######### + # Assistant + # @ref: http://www.assistant.ua + ######### + - regex: '; *(?:ASSISTANT )?(AP)-?([1789]\d{2}[A-Z]{0,2}|80104) Build' + device_replacement: 'Assistant $1-$2' + brand_replacement: 'Assistant' + model_replacement: '$1-$2' + + ######### + # Asus + # @ref: http://www.asus.com/uk/Tablets_Mobile/ + ######### + - regex: '; *(ME17\d[^;/]*|ME3\d{2}[^;/]+|K00[A-Z]|Nexus 10|Nexus 7(?: 2013)?|PadFone[^;/]*|Transformer[^;/]*|TF\d{3}[^;/]*|eeepc) Build' + device_replacement: 'Asus $1' + brand_replacement: 'Asus' + model_replacement: '$1' + - regex: '; *ASUS[ _]*([^;/]+) Build' + device_replacement: 'Asus $1' + brand_replacement: 'Asus' + model_replacement: '$1' + + ######### + # Garmin-Asus + ######### + - regex: '; *Garmin-Asus ([^;/]+) Build' + device_replacement: 'Garmin-Asus $1' + brand_replacement: 'Garmin-Asus' + model_replacement: '$1' + - regex: '; *(Garminfone) Build' + device_replacement: 'Garmin $1' + brand_replacement: 'Garmin-Asus' + model_replacement: '$1' + + ######### + # Attab + # @ref: http://www.theattab.com/ + ######### + - regex: '; (@TAB-[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Attab' + model_replacement: '$1' + + ######### + # Audiosonic + # @ref: ?? 
+ # @note: Take care with Docomo T-01 Toshiba + ######### + - regex: '; *(T-(?:07|[^0]\d)[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Audiosonic' + model_replacement: '$1' + + ######### + # Axioo + # @ref: http://www.axiooworld.com/ww/index.php + ######### + - regex: '; *(?:Axioo[ _\-]([^;/]+)|(picopad)[ _\-]([^;/]+)) Build' + regex_flag: 'i' + device_replacement: 'Axioo $1$2 $3' + brand_replacement: 'Axioo' + model_replacement: '$1$2 $3' + + ######### + # Azend + # @ref: http://azendcorp.com/index.php/products/portable-electronics + ######### + - regex: '; *(V(?:100|700|800)[^;/]*) Build' + device_replacement: '$1' + brand_replacement: 'Azend' + model_replacement: '$1' + + ######### + # Bak + # @ref: http://www.bakinternational.com/produtos.php?cat=80 + ######### + - regex: '; *(IBAK\-[^;/]*) Build' + regex_flag: 'i' + device_replacement: '$1' + brand_replacement: 'Bak' + model_replacement: '$1' + + ######### + # Bedove + # @ref: http://www.bedove.com/product.html + # @models: HY6501|HY5001|X12|X21|I5 + ######### + - regex: '; *(HY5001|HY6501|X12|X21|I5) Build' + device_replacement: 'Bedove $1' + brand_replacement: 'Bedove' + model_replacement: '$1' + + ######### + # Benss + # @ref: http://www.benss.net/ + ######### + - regex: '; *(JC-[^;/]*) Build' + device_replacement: 'Benss $1' + brand_replacement: 'Benss' + model_replacement: '$1' + + ######### + # Blackberry + # @ref: http://uk.blackberry.com/ + # @note: Android Apps seams to be used here + ######### + - regex: '; *(BB) ([^;/]+) Build' + device_replacement: '$1 $2' + brand_replacement: 'Blackberry' + model_replacement: '$2' + + ######### + # Blackbird + # @ref: http://iblackbird.co.kr + ######### + - regex: '; *(BlackBird)[ _](I8.*) Build' + device_replacement: '$1 $2' + brand_replacement: '$1' + model_replacement: '$2' + - regex: '; *(BlackBird)[ _](.*) Build' + device_replacement: '$1 $2' + brand_replacement: '$1' + model_replacement: '$2' + + ######### + # Blaupunkt + # @ref: http://www.blaupunkt.com + ######### + # Endeavour + - regex: '; *([0-9]+BP[EM][^;/]*|Endeavour[^;/]+) Build' + device_replacement: 'Blaupunkt $1' + brand_replacement: 'Blaupunkt' + model_replacement: '$1' + + ######### + # Blu + # @ref: http://bluproducts.com + ######### + - regex: '; *((?:BLU|Blu)[ _\-])([^;/]+) Build' + device_replacement: '$1$2' + brand_replacement: 'Blu' + model_replacement: '$2' + # BMOBILE = operator branded device + - regex: '; *(?:BMOBILE )?(Blu|BLU|DASH [^;/]+|VIVO 4\.3|TANK 4\.5) Build' + device_replacement: '$1' + brand_replacement: 'Blu' + model_replacement: '$1' + + ######### + # Blusens + # @ref: http://www.blusens.com/es/?sg=1&sv=al&roc=1 + ######### + # tablet + - regex: '; *(TOUCH\d[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Blusens' + model_replacement: '$1' + + ######### + # Bmobile + # @ref: http://bmobile.eu.com/?categoria=smartphones-2 + # @note: Might collide with Maxx as AX is used also there. + ######### + # smartphone + - regex: '; *(AX5\d+) Build' + device_replacement: '$1' + brand_replacement: 'Bmobile' + model_replacement: '$1' + + ######### + # bq + # @ref: http://bqreaders.com + ######### + - regex: '; *([Bb]q) ([^;/]+);? 
Build' + device_replacement: '$1 $2' + brand_replacement: 'bq' + model_replacement: '$2' + - regex: '; *(Maxwell [^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'bq' + model_replacement: '$1' + + ######### + # Braun Phototechnik + # @ref: http://www.braun-phototechnik.de/en/products/list/~pcat.250/Tablet-PC.html + ######### + - regex: '; *((?:B-Tab|B-TAB) ?\d[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Braun' + model_replacement: '$1' + + ######### + # Broncho + # @ref: http://www.broncho.cn/ + ######### + - regex: '; *(Broncho) ([^;/]+) Build' + device_replacement: '$1 $2' + brand_replacement: '$1' + model_replacement: '$2' + + ######### + # Captiva + # @ref: http://www.captiva-power.de + ######### + - regex: '; *CAPTIVA ([^;/]+) Build' + device_replacement: 'Captiva $1' + brand_replacement: 'Captiva' + model_replacement: '$1' + + ######### + # Casio + # @ref: http://www.casiogzone.com/ + ######### + - regex: '; *(C771|CAL21|IS11CA) Build' + device_replacement: '$1' + brand_replacement: 'Casio' + model_replacement: '$1' + + ######### + # Cat + # @ref: http://www.cat-sound.com + ######### + - regex: '; *(?:Cat|CAT) ([^;/]+) Build' + device_replacement: 'Cat $1' + brand_replacement: 'Cat' + model_replacement: '$1' + - regex: '; *(?:Cat)(Nova.*) Build' + device_replacement: 'Cat $1' + brand_replacement: 'Cat' + model_replacement: '$1' + - regex: '; *(INM8002KP|ADM8000KP_[AB]) Build' + device_replacement: '$1' + brand_replacement: 'Cat' + model_replacement: 'Tablet PHOENIX 8.1J0' + + ######### + # Celkon + # @ref: http://www.celkonmobiles.com/?_a=products + # @models: A10, A19Q, A101, A105, A107, A107\+, A112, A118, A119, A119Q, A15, A19, A20, A200, A220, A225, A22 Race, A27, A58, A59, A60, A62, A63, A64, A66, A67, A69, A75, A77, A79, A8\+, A83, A85, A86, A87, A89 Ultima, A9\+, A90, A900, A95, A97i, A98, AR 40, AR 45, AR 50, ML5 + ######### + - regex: '; *(?:[Cc]elkon[ _\*]|CELKON[ _\*])([^;/\)]+) ?(?:Build|;|\))' + device_replacement: '$1' + brand_replacement: 'Celkon' + model_replacement: '$1' + - regex: 'Build/(?:[Cc]elkon)+_?([^;/_\)]+)' + device_replacement: '$1' + brand_replacement: 'Celkon' + model_replacement: '$1' + - regex: '; *(CT)-?(\d+) Build' + device_replacement: '$1$2' + brand_replacement: 'Celkon' + model_replacement: '$1$2' + # smartphones + - regex: '; *(A19|A19Q|A105|A107[^;/\)]*) ?(?:Build|;|\))' + device_replacement: '$1' + brand_replacement: 'Celkon' + model_replacement: '$1' + + ######### + # ChangJia + # @ref: http://www.cjshowroom.com/eproducts.aspx?classcode=004001001 + # @brief: Chinese manufacturer that makes tablets for various small brands + # (e.g. 
http://www.zeepad.net/index.html) + ######### + - regex: '; *(TPC[0-9]{4,5}) Build' + device_replacement: '$1' + brand_replacement: 'ChangJia' + model_replacement: '$1' + + ######### + # Cloudfone + # @ref: http://www.cloudfonemobile.com/ + ######### + - regex: '; *(Cloudfone)[ _](Excite)([^ ][^;/]+) Build' + device_replacement: '$1 $2 $3' + brand_replacement: 'Cloudfone' + model_replacement: '$1 $2 $3' + - regex: '; *(Excite|ICE)[ _](\d+[^;/]+) Build' + device_replacement: 'Cloudfone $1 $2' + brand_replacement: 'Cloudfone' + model_replacement: 'Cloudfone $1 $2' + - regex: '; *(Cloudfone|CloudPad)[ _]([^;/]+) Build' + device_replacement: '$1 $2' + brand_replacement: 'Cloudfone' + model_replacement: '$1 $2' + + ######### + # Cmx + # @ref: http://cmx.at/de/ + ######### + - regex: '; *((?:Aquila|Clanga|Rapax)[^;/]+) Build' + regex_flag: 'i' + device_replacement: '$1' + brand_replacement: 'Cmx' + model_replacement: '$1' + + ######### + # CobyKyros + # @ref: http://cobykyros.com + # @note: Be careful with MID\d{3} from MpMan or Manta + ######### + - regex: '; *(?:CFW-|Kyros )?(MID[0-9]{4}(?:[ABC]|SR|TV)?)(\(3G\)-4G| GB 8K| 3G| 8K| GB)? *(?:Build|[;\)])' + device_replacement: 'CobyKyros $1$2' + brand_replacement: 'CobyKyros' + model_replacement: '$1$2' + + ######### + # Coolpad + # @ref: ?? + ######### + - regex: '; *([^;/]*)Coolpad[ _]([^;/]+) Build' + device_replacement: '$1$2' + brand_replacement: 'Coolpad' + model_replacement: '$1$2' + + ######### + # Cube + # @ref: http://www.cube-tablet.com/buy-products.html + ######### + - regex: '; *(CUBE[ _])?([KU][0-9]+ ?GT.*|A5300) Build' + regex_flag: 'i' + device_replacement: '$1$2' + brand_replacement: 'Cube' + model_replacement: '$2' + + ######### + # Cubot + # @ref: http://www.cubotmall.com/ + ######### + - regex: '; *CUBOT ([^;/]+) Build' + regex_flag: 'i' + device_replacement: '$1' + brand_replacement: 'Cubot' + model_replacement: '$1' + - regex: '; *(BOBBY) Build' + regex_flag: 'i' + device_replacement: '$1' + brand_replacement: 'Cubot' + model_replacement: '$1' + + ######### + # Danew + # @ref: http://www.danew.com/produits-tablette.php + ######### + - regex: '; *(Dslide [^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Danew' + model_replacement: '$1' + + ######### + # Dell + # @ref: http://www.dell.com + # @ref: http://www.softbank.jp/mobile/support/product/101dl/ + # @ref: http://www.softbank.jp/mobile/support/product/001dl/ + # @ref: http://developer.emnet.ne.jp/android.html + # @ref: http://www.dell.com/in/p/mobile-xcd28/pd + # @ref: http://www.dell.com/in/p/mobile-xcd35/pd + ######### + - regex: '; *(XCD)[ _]?(28|35) Build' + device_replacement: 'Dell $1$2' + brand_replacement: 'Dell' + model_replacement: '$1$2' + - regex: '; *(001DL) Build' + device_replacement: 'Dell $1' + brand_replacement: 'Dell' + model_replacement: 'Streak' + - regex: '; *(?:Dell|DELL) (Streak) Build' + device_replacement: 'Dell $1' + brand_replacement: 'Dell' + model_replacement: 'Streak' + - regex: '; *(101DL|GS01|Streak Pro[^;/]*) Build' + device_replacement: 'Dell $1' + brand_replacement: 'Dell' + model_replacement: 'Streak Pro' + - regex: '; *([Ss]treak ?7) Build' + device_replacement: 'Dell $1' + brand_replacement: 'Dell' + model_replacement: 'Streak 7' + - regex: '; *(Mini-3iX) Build' + device_replacement: 'Dell $1' + brand_replacement: 'Dell' + model_replacement: '$1' + - regex: '; *(?:Dell|DELL)[ _](Aero|Venue|Thunder|Mini.*|Streak[ _]Pro) Build' + device_replacement: 'Dell $1' + brand_replacement: 'Dell' + model_replacement: '$1' + - regex: 
'; *Dell[ _]([^;/]+) Build' + device_replacement: 'Dell $1' + brand_replacement: 'Dell' + model_replacement: '$1' + - regex: '; *Dell ([^;/]+) Build' + device_replacement: 'Dell $1' + brand_replacement: 'Dell' + model_replacement: '$1' + + ######### + # Denver + # @ref: http://www.denver-electronics.com/tablets1/ + ######### + - regex: '; *(TA[CD]-\d+[^;/]*) Build' + device_replacement: '$1' + brand_replacement: 'Denver' + model_replacement: '$1' + + ######### + # Dex + # @ref: http://dex.ua/ + ######### + - regex: '; *(iP[789]\d{2}(?:-3G)?|IP10\d{2}(?:-8GB)?) Build' + device_replacement: '$1' + brand_replacement: 'Dex' + model_replacement: '$1' + + ######### + # DNS AirTab + # @ref: http://www.dns-shop.ru/ + ######### + - regex: '; *(AirTab)[ _\-]([^;/]+) Build' + device_replacement: '$1 $2' + brand_replacement: 'DNS' + model_replacement: '$1 $2' + + ######### + # Docomo (Operator Branded Device) + # @ref: http://www.ipentec.com/document/document.aspx?page=android-useragent + ######### + - regex: '; *(F\-\d[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Fujitsu' + model_replacement: '$1' + - regex: '; *(HT-03A) Build' + device_replacement: '$1' + brand_replacement: 'HTC' + model_replacement: 'Magic' + - regex: '; *(HT\-\d[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'HTC' + model_replacement: '$1' + - regex: '; *(L\-\d[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'LG' + model_replacement: '$1' + - regex: '; *(N\-\d[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Nec' + model_replacement: '$1' + - regex: '; *(P\-\d[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Panasonic' + model_replacement: '$1' + - regex: '; *(SC\-\d[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Samsung' + model_replacement: '$1' + - regex: '; *(SH\-\d[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Sharp' + model_replacement: '$1' + - regex: '; *(SO\-\d[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'SonyEricsson' + model_replacement: '$1' + - regex: '; *(T\-0[12][^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Toshiba' + model_replacement: '$1' + + ######### + # DOOV + # @ref: http://www.doov.com.cn/ + ######### + - regex: '; *(DOOV)[ _]([^;/]+) Build' + device_replacement: '$1 $2' + brand_replacement: 'DOOV' + model_replacement: '$2' + + ######### + # Enot + # @ref: http://www.enot.ua/ + ######### + - regex: '; *(Enot|ENOT)[ -]?([^;/]+) Build' + device_replacement: '$1 $2' + brand_replacement: 'Enot' + model_replacement: '$2' + + ######### + # Evercoss + # @ref: http://evercoss.com/android/ + ######### + - regex: '; *[^;/]+ Build/(?:CROSS|Cross)+[ _\-]([^\)]+)' + device_replacement: 'CROSS $1' + brand_replacement: 'Evercoss' + model_replacement: 'Cross $1' + - regex: '; *(CROSS|Cross)[ _\-]([^;/]+) Build' + device_replacement: '$1 $2' + brand_replacement: 'Evercoss' + model_replacement: 'Cross $2' + + ######### + # Explay + # @ref: http://explay.ru/ + ######### + - regex: '; *Explay[_ ](.+?)(?:[\)]| Build)' + device_replacement: '$1' + brand_replacement: 'Explay' + model_replacement: '$1' + + ######### + # Fly + # @ref: http://www.fly-phone.com/ + ######### + - regex: '; *(IQ.*) Build' + device_replacement: '$1' + brand_replacement: 'Fly' + model_replacement: '$1' + - regex: '; *(Fly|FLY)[ _](IQ[^;]+|F[34]\d+[^;]*);? 
Build' + device_replacement: '$1 $2' + brand_replacement: 'Fly' + model_replacement: '$2' + + ######### + # Fujitsu + # @ref: http://www.fujitsu.com/global/ + ######### + - regex: '; *(M532|Q572|FJL21) Build/' + device_replacement: '$1' + brand_replacement: 'Fujitsu' + model_replacement: '$1' + + ######### + # Galapad + # @ref: http://www.galapad.net/product.html + ######### + - regex: '; *(G1) Build' + device_replacement: '$1' + brand_replacement: 'Galapad' + model_replacement: '$1' + + ######### + # Geeksphone + # @ref: http://www.geeksphone.com/ + ######### + - regex: '; *(Geeksphone) ([^;/]+) Build' + device_replacement: '$1 $2' + brand_replacement: '$1' + model_replacement: '$2' + + ######### + # Gfive + # @ref: http://www.gfivemobile.com/en + ######### + #- regex: '; *(G\'?FIVE) ([^;/]+) Build' # there is a problem with python yaml parser here + - regex: '; *(G[^F]?FIVE) ([^;/]+) Build' + device_replacement: '$1 $2' + brand_replacement: 'Gfive' + model_replacement: '$2' + + ######### + # Gionee + # @ref: http://www.gionee.com/ + ######### + - regex: '; *(Gionee)[ _\-]([^;/]+)(?:/[^;/]+)? Build' + regex_flag: 'i' + device_replacement: '$1 $2' + brand_replacement: 'Gionee' + model_replacement: '$2' + - regex: '; *(GN\d+[A-Z]?|INFINITY_PASSION|Ctrl_V1) Build' + device_replacement: 'Gionee $1' + brand_replacement: 'Gionee' + model_replacement: '$1' + - regex: '; *(E3) Build/JOP40D' + device_replacement: 'Gionee $1' + brand_replacement: 'Gionee' + model_replacement: '$1' + + ######### + # GoClever + # @ref: http://www.goclever.com + ######### + - regex: '; *((?:FONE|QUANTUM|INSIGNIA) \d+[^;/]*|PLAYTAB) Build' + device_replacement: 'GoClever $1' + brand_replacement: 'GoClever' + model_replacement: '$1' + - regex: '; *GOCLEVER ([^;/]+) Build' + device_replacement: 'GoClever $1' + brand_replacement: 'GoClever' + model_replacement: '$1' + + ######### + # Google + # @ref: http://www.google.de/glass/start/ + ######### + - regex: '; *(Glass \d+) Build' + device_replacement: '$1' + brand_replacement: 'Google' + model_replacement: '$1' + + ######### + # Gigabyte + # @ref: http://gsmart.gigabytecm.com/en/ + ######### + - regex: '; *(GSmart)[ -]([^/]+) Build' + device_replacement: '$1 $2' + brand_replacement: 'Gigabyte' + model_replacement: '$1 $2' + + ######### + # Freescale development boards + # @ref: http://www.freescale.com/webapp/sps/site/prod_summary.jsp?code=IMX53QSB + ######### + - regex: '; *(imx5[13]_[^/]+) Build' + device_replacement: 'Freescale $1' + brand_replacement: 'Freescale' + model_replacement: '$1' + + ######### + # Haier + # @ref: http://www.haier.com/ + # @ref: http://www.haier.com/de/produkte/tablet/ + ######### + - regex: '; *Haier[ _\-]([^/]+) Build' + device_replacement: 'Haier $1' + brand_replacement: 'Haier' + model_replacement: '$1' + - regex: '; *(PAD1016) Build' + device_replacement: 'Haipad $1' + brand_replacement: 'Haipad' + model_replacement: '$1' + + ######### + # Haipad + # @ref: http://www.haipad.net/ + # @models: V7P|M7SM7S|M9XM9X|M7XM7X|M9|M8|M7-M|M1002|M7|M701 + ######### + - regex: '; *(M701|M7|M8|M9) Build' + device_replacement: 'Haipad $1' + brand_replacement: 'Haipad' + model_replacement: '$1' + + ######### + # Hannspree + # @ref: http://www.hannspree.eu/ + # @models: SN10T1|SN10T2|SN70T31B|SN70T32W + ######### + - regex: '; *(SN\d+T[^;\)/]*)(?: Build|[;\)])' + device_replacement: 'Hannspree $1' + brand_replacement: 'Hannspree' + model_replacement: '$1' + + ######### + # HCLme + # @ref: http://www.hclmetablet.com/india/ + ######### + - regex: 'Build/HCL 
ME Tablet ([^;\)]+)[\);]' + device_replacement: 'HCLme $1' + brand_replacement: 'HCLme' + model_replacement: '$1' + - regex: '; *([^;\/]+) Build/HCL' + device_replacement: 'HCLme $1' + brand_replacement: 'HCLme' + model_replacement: '$1' + + ######### + # Hena + # @ref: http://www.henadigital.com/en/product/index.asp?id=6 + ######### + - regex: '; *(MID-?\d{4}C[EM]) Build' + device_replacement: 'Hena $1' + brand_replacement: 'Hena' + model_replacement: '$1' + + ######### + # Hisense + # @ref: http://www.hisense.com/ + ######### + - regex: '; *(EG\d{2,}|HS-[^;/]+|MIRA[^;/]+) Build' + device_replacement: 'Hisense $1' + brand_replacement: 'Hisense' + model_replacement: '$1' + - regex: '; *(andromax[^;/]+) Build' + regex_flag: 'i' + device_replacement: 'Hisense $1' + brand_replacement: 'Hisense' + model_replacement: '$1' + + ######### + # hitech + # @ref: http://www.hitech-mobiles.com/ + ######### + - regex: '; *(?:AMAZE[ _](S\d+)|(S\d+)[ _]AMAZE) Build' + device_replacement: 'AMAZE $1$2' + brand_replacement: 'hitech' + model_replacement: 'AMAZE $1$2' + + ######### + # HP + # @ref: http://www.hp.com/ + ######### + - regex: '; *(PlayBook) Build' + device_replacement: 'HP $1' + brand_replacement: 'HP' + model_replacement: '$1' + - regex: '; *HP ([^/]+) Build' + device_replacement: 'HP $1' + brand_replacement: 'HP' + model_replacement: '$1' + - regex: '; *([^/]+_tenderloin) Build' + device_replacement: 'HP TouchPad' + brand_replacement: 'HP' + model_replacement: 'TouchPad' + + ######### + # Huawei + # @ref: http://www.huaweidevice.com + # @note: Needs to be before HTC due to Desire HD Build on U8815 + ######### + - regex: '; *(HUAWEI |Huawei-)?([UY][^;/]+) Build/(?:Huawei|HUAWEI)([UY][^\);]+)\)' + device_replacement: '$1$2' + brand_replacement: 'Huawei' + model_replacement: '$2' + - regex: '; *([^;/]+) Build[/ ]Huawei(MT1-U06|[A-Z]+\d+[^\);]+)[^\);]*\)' + device_replacement: '$1' + brand_replacement: 'Huawei' + model_replacement: '$2' + - regex: '; *(S7|M860) Build' + device_replacement: '$1' + brand_replacement: 'Huawei' + model_replacement: '$1' + - regex: '; *((?:HUAWEI|Huawei)[ \-]?)(MediaPad) Build' + device_replacement: '$1$2' + brand_replacement: 'Huawei' + model_replacement: '$2' + - regex: '; *((?:HUAWEI[ _]?|Huawei[ _])?Ascend[ _])([^;/]+) Build' + device_replacement: '$1$2' + brand_replacement: 'Huawei' + model_replacement: '$2' + - regex: '; *((?:HUAWEI|Huawei)[ _\-]?)((?:G700-|MT-)[^;/]+) Build' + device_replacement: '$1$2' + brand_replacement: 'Huawei' + model_replacement: '$2' + - regex: '; *((?:HUAWEI|Huawei)[ _\-]?)([^;/]+) Build' + device_replacement: '$1$2' + brand_replacement: 'Huawei' + model_replacement: '$2' + - regex: '; *(MediaPad[^;]+|SpringBoard) Build/Huawei' + device_replacement: '$1' + brand_replacement: 'Huawei' + model_replacement: '$1' + - regex: '; *([^;]+) Build/Huawei' + device_replacement: '$1' + brand_replacement: 'Huawei' + model_replacement: '$1' + - regex: '; *([Uu])([89]\d{3}) Build' + device_replacement: '$1$2' + brand_replacement: 'Huawei' + model_replacement: 'U$2' + - regex: '; *(?:Ideos |IDEOS )(S7) Build' + device_replacement: 'Huawei Ideos$1' + brand_replacement: 'Huawei' + model_replacement: 'Ideos$1' + - regex: '; *(?:Ideos |IDEOS )([^;/]+\s*|\s*)Build' + device_replacement: 'Huawei Ideos$1' + brand_replacement: 'Huawei' + model_replacement: 'Ideos$1' + - regex: '; *(Orange Daytona|Pulse|Pulse Mini|Vodafone 858|C8500|C8600|C8650|C8660|Nexus 6P) Build' + device_replacement: 'Huawei $1' + brand_replacement: 'Huawei' + model_replacement: '$1' + + 
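Each entry above follows the same contract: a ua-parser-style engine tries the device parsers in file order against the user-agent string, the first matching 'regex' wins, and $1..$9 inside 'device_replacement', 'brand_replacement' and 'model_replacement' are substituted with the corresponding capture groups. That first-match-wins rule is why ordering notes such as the Huawei block's "Needs to be before HTC" matter. Below is a minimal Java sketch of that substitution for one Huawei entry taken from the block above; the entry values are copied from the file, while the class name, helper method, and sample user agent are illustrative only.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class DeviceEntrySketch {
    // One Huawei entry from the block above.
    static final Pattern REGEX = Pattern.compile("; *([Uu])([89]\\d{3}) Build");
    static final String DEVICE = "$1$2";   // device_replacement
    static final String BRAND  = "Huawei"; // brand_replacement
    static final String MODEL  = "U$2";    // model_replacement

    public static void main(String[] args) {
        // A plausible (made-up) Android user agent for a Huawei U8666.
        String ua = "Mozilla/5.0 (Linux; U; Android 2.3.6; de-de; U8666 Build/HuaweiU8666E) AppleWebKit/533.1";
        Matcher m = REGEX.matcher(ua);
        if (m.find()) {
            System.out.println("device: " + expand(DEVICE, m)); // device: U8666
            System.out.println("brand:  " + expand(BRAND, m));  // brand:  Huawei
            System.out.println("model:  " + expand(MODEL, m));  // model:  U8666
        }
    }

    // Substitute $1..$9 with the matcher's capture groups; an unmatched group becomes "".
    static String expand(String template, Matcher m) {
        String out = template;
        for (int g = m.groupCount(); g >= 1; g--) { // highest group first, so "$1" never eats a longer "$1x"
            out = out.replace("$" + g, m.group(g) == null ? "" : m.group(g));
        }
        return out;
    }
}

Entries that carry a regex_flag: 'i' field work the same way, except the pattern is compiled case-insensitively (Pattern.CASE_INSENSITIVE in java.util.regex terms).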
######### + # HTC + # @ref: http://www.htc.com/www/products/ + # @ref: http://en.wikipedia.org/wiki/List_of_HTC_phones + ######### + + - regex: '; *HTC[ _]([^;]+); Windows Phone' + device_replacement: 'HTC $1' + brand_replacement: 'HTC' + model_replacement: '$1' + + # Android HTC with Version Number matcher + # ; HTC_0P3Z11/1.12.161.3 Build + # ;HTC_A3335 V2.38.841.1 Build + - regex: '; *(?:HTC[ _/])+([^ _/]+)(?:[/\\]1\.0 | V|/| +)\d+\.\d[\d\.]*(?: *Build|\))' + device_replacement: 'HTC $1' + brand_replacement: 'HTC' + model_replacement: '$1' + - regex: '; *(?:HTC[ _/])+([^ _/]+)(?:[ _/]([^ _/]+))?(?:[/\\]1\.0 | V|/| +)\d+\.\d[\d\.]*(?: *Build|\))' + device_replacement: 'HTC $1 $2' + brand_replacement: 'HTC' + model_replacement: '$1 $2' + - regex: '; *(?:HTC[ _/])+([^ _/]+)(?:[ _/]([^ _/]+)(?:[ _/]([^ _/]+))?)?(?:[/\\]1\.0 | V|/| +)\d+\.\d[\d\.]*(?: *Build|\))' + device_replacement: 'HTC $1 $2 $3' + brand_replacement: 'HTC' + model_replacement: '$1 $2 $3' + - regex: '; *(?:HTC[ _/])+([^ _/]+)(?:[ _/]([^ _/]+)(?:[ _/]([^ _/]+)(?:[ _/]([^ _/]+))?)?)?(?:[/\\]1\.0 | V|/| +)\d+\.\d[\d\.]*(?: *Build|\))' + device_replacement: 'HTC $1 $2 $3 $4' + brand_replacement: 'HTC' + model_replacement: '$1 $2 $3 $4' + + # Android HTC without Version Number matcher + - regex: '; *(?:(?:HTC|htc)(?:_blocked)*[ _/])+([^ _/;]+)(?: *Build|[;\)]| - )' + device_replacement: 'HTC $1' + brand_replacement: 'HTC' + model_replacement: '$1' + - regex: '; *(?:(?:HTC|htc)(?:_blocked)*[ _/])+([^ _/]+)(?:[ _/]([^ _/;\)]+))?(?: *Build|[;\)]| - )' + device_replacement: 'HTC $1 $2' + brand_replacement: 'HTC' + model_replacement: '$1 $2' + - regex: '; *(?:(?:HTC|htc)(?:_blocked)*[ _/])+([^ _/]+)(?:[ _/]([^ _/]+)(?:[ _/]([^ _/;\)]+))?)?(?: *Build|[;\)]| - )' + device_replacement: 'HTC $1 $2 $3' + brand_replacement: 'HTC' + model_replacement: '$1 $2 $3' + - regex: '; *(?:(?:HTC|htc)(?:_blocked)*[ _/])+([^ _/]+)(?:[ _/]([^ _/]+)(?:[ _/]([^ _/]+)(?:[ _/]([^ /;]+))?)?)?(?: *Build|[;\)]| - )' + device_replacement: 'HTC $1 $2 $3 $4' + brand_replacement: 'HTC' + model_replacement: '$1 $2 $3 $4' + + # HTC Streaming Player + - regex: 'HTC Streaming Player [^\/]*/[^\/]*/ htc_([^/]+) /' + device_replacement: 'HTC $1' + brand_replacement: 'HTC' + model_replacement: '$1' + # general matcher for anything else + - regex: '(?:[;,] *|^)(?:htccn_chs-)?HTC[ _-]?([^;]+?)(?: *Build|clay|Android|-?Mozilla| Opera| Profile| UNTRUSTED|[;/\(\)]|$)' + regex_flag: 'i' + device_replacement: 'HTC $1' + brand_replacement: 'HTC' + model_replacement: '$1' + # Android matchers without HTC + - regex: '; *(A6277|ADR6200|ADR6300|ADR6350|ADR6400[A-Z]*|ADR6425[A-Z]*|APX515CKT|ARIA|Desire[^_ ]*|Dream|EndeavorU|Eris|Evo|Flyer|HD2|Hero|HERO200|Hero CDMA|HTL21|Incredible|Inspire[A-Z0-9]*|Legend|Liberty|Nexus ?(?:One|HD2)|One|One S C2|One[ _]?(?:S|V|X\+?)\w*|PC36100|PG06100|PG86100|S31HT|Sensation|Wildfire)(?: Build|[/;\(\)])' + regex_flag: 'i' + device_replacement: 'HTC $1' + brand_replacement: 'HTC' + model_replacement: '$1' + - regex: '; *(ADR6200|ADR6400L|ADR6425LVW|Amaze|DesireS?|EndeavorU|Eris|EVO|Evo\d[A-Z]+|HD2|IncredibleS?|Inspire[A-Z0-9]*|Inspire[A-Z0-9]*|Sensation[A-Z0-9]*|Wildfire)[ _-](.+?)(?:[/;\)]|Build|MIUI|1\.0)' + regex_flag: 'i' + device_replacement: 'HTC $1 $2' + brand_replacement: 'HTC' + model_replacement: '$1 $2' + + ######### + # Hyundai + # @ref: http://www.hyundaitechnologies.com + ######### + - regex: '; *HYUNDAI (T\d[^/]*) Build' + device_replacement: 'Hyundai $1' + brand_replacement: 'Hyundai' + model_replacement: '$1' + - regex: '; *HYUNDAI 
([^;/]+) Build' + device_replacement: 'Hyundai $1' + brand_replacement: 'Hyundai' + model_replacement: '$1' + # X900? http://www.amazon.com/Hyundai-X900-Retina-Android-Bluetooth/dp/B00AO07H3O + - regex: '; *(X700|Hold X|MB-6900) Build' + device_replacement: 'Hyundai $1' + brand_replacement: 'Hyundai' + model_replacement: '$1' + + ######### + # iBall + # @ref: http://www.iball.co.in/Category/Mobiles/22 + ######### + - regex: '; *(?:iBall[ _\-])?(Andi)[ _]?(\d[^;/]*) Build' + regex_flag: 'i' + device_replacement: '$1 $2' + brand_replacement: 'iBall' + model_replacement: '$1 $2' + - regex: '; *(IBall)(?:[ _]([^;/]+)|) Build' + regex_flag: 'i' + device_replacement: '$1 $2' + brand_replacement: 'iBall' + model_replacement: '$2' + + ######### + # IconBIT + # @ref: http://www.iconbit.com/catalog/tablets/ + ######### + - regex: '; *(NT-\d+[^ ;/]*|Net[Tt]AB [^;/]+|Mercury [A-Z]+|iconBIT)(?: S/N:[^;/]+)? Build' + device_replacement: '$1' + brand_replacement: 'IconBIT' + model_replacement: '$1' + + ######### + # IMO + # @ref: http://www.ponselimo.com/ + ######### + - regex: '; *(IMO)[ _]([^;/]+) Build' + regex_flag: 'i' + device_replacement: '$1 $2' + brand_replacement: 'IMO' + model_replacement: '$2' + + ######### + # i-mobile + # @ref: http://www.i-mobilephone.com/ + ######### + - regex: '; *i-?mobile[ _]([^/]+) Build/' + regex_flag: 'i' + device_replacement: 'i-mobile $1' + brand_replacement: 'imobile' + model_replacement: '$1' + - regex: '; *(i-(?:style|note)[^/]*) Build/' + regex_flag: 'i' + device_replacement: 'i-mobile $1' + brand_replacement: 'imobile' + model_replacement: '$1' + + ######### + # Impression + # @ref: http://impression.ua/planshetnye-kompyutery + ######### + - regex: '; *(ImPAD) ?(\d+(?:.)*) Build' + device_replacement: '$1 $2' + brand_replacement: 'Impression' + model_replacement: '$1 $2' + + ######### + # Infinix + # @ref: http://www.infinixmobility.com/index.html + ######### + - regex: '; *(Infinix)[ _]([^;/]+) Build' + device_replacement: '$1 $2' + brand_replacement: 'Infinix' + model_replacement: '$2' + + ######### + # Informer + # @ref: ?? + ######### + - regex: '; *(Informer)[ \-]([^;/]+) Build' + device_replacement: '$1 $2' + brand_replacement: 'Informer' + model_replacement: '$2' + + ######### + # Intenso + # @ref: http://www.intenso.de + # @models: 7":TAB 714,TAB 724;8":TAB 814,TAB 824;10":TAB 1004 + ######### + - regex: '; *(TAB) ?([78][12]4) Build' + device_replacement: 'Intenso $1' + brand_replacement: 'Intenso' + model_replacement: '$1 $2' + + ######### + # Intex + # @ref: http://intexmobile.in/index.aspx + # @note: Zync also offers a "Cloud Z5" device + ######### + # smartphones + - regex: '; *(?:Intex[ _])?(AQUA|Aqua)([ _\.\-])([^;/]+) *(?:Build|;)' + device_replacement: '$1$2$3' + brand_replacement: 'Intex' + model_replacement: '$1 $3' + # matches "INTEX CLOUD X1" + - regex: '; *(?:INTEX|Intex)(?:[_ ]([^\ _;/]+))(?:[_ ]([^\ _;/]+))? 
*(?:Build|;)' + device_replacement: '$1 $2' + brand_replacement: 'Intex' + model_replacement: '$1 $2' + # tablets + - regex: '; *([iI]Buddy)[ _]?(Connect)(?:_|\?_| )?([^;/]*) *(?:Build|;)' + device_replacement: '$1 $2 $3' + brand_replacement: 'Intex' + model_replacement: 'iBuddy $2 $3' + - regex: '; *(I-Buddy)[ _]([^;/]+) *(?:Build|;)' + device_replacement: '$1 $2' + brand_replacement: 'Intex' + model_replacement: 'iBuddy $2' + + ######### + # iOCEAN + # @ref: http://www.iocean.cc/ + ######### + - regex: '; *(iOCEAN) ([^/]+) Build' + regex_flag: 'i' + device_replacement: '$1 $2' + brand_replacement: 'iOCEAN' + model_replacement: '$2' + + ######### + # i.onik + # @ref: http://www.i-onik.de/ + ######### + - regex: '; *(TP\d+(?:\.\d+)?\-\d[^;/]+) Build' + device_replacement: 'ionik $1' + brand_replacement: 'ionik' + model_replacement: '$1' + + ######### + # IRU.ru + # @ref: http://www.iru.ru/catalog/soho/planetable/ + ######### + - regex: '; *(M702pro) Build' + device_replacement: '$1' + brand_replacement: 'Iru' + model_replacement: '$1' + + ######### + # Ivio + # @ref: http://www.ivio.com/mobile.php + # @models: DG80,DG20,DE38,DE88,MD70 + ######### + - regex: '; *(DE88Plus|MD70) Build' + device_replacement: '$1' + brand_replacement: 'Ivio' + model_replacement: '$1' + - regex: '; *IVIO[_\-]([^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Ivio' + model_replacement: '$1' + + ######### + # Jaytech + # @ref: http://www.jay-tech.de/jaytech/servlet/frontend/ + ######### + - regex: '; *(TPC-\d+|JAY-TECH) Build' + device_replacement: '$1' + brand_replacement: 'Jaytech' + model_replacement: '$1' + + ######### + # Jiayu + # @ref: http://www.ejiayu.com/en/Product.html + ######### + - regex: '; *(JY-[^;/]+|G[234]S?) Build' + device_replacement: '$1' + brand_replacement: 'Jiayu' + model_replacement: '$1' + + ######### + # JXD + # @ref: http://www.jxd.hk/ + ######### + - regex: '; *(JXD)[ _\-]([^;/]+) Build' + device_replacement: '$1 $2' + brand_replacement: 'JXD' + model_replacement: '$2' + + ######### + # Karbonn + # @ref: http://www.karbonnmobiles.com/products_tablet.php + ######### + - regex: '; *Karbonn[ _]?([^;/]+) *(?:Build|;)' + regex_flag: 'i' + device_replacement: '$1' + brand_replacement: 'Karbonn' + model_replacement: '$1' + - regex: '; *([^;]+) Build/Karbonn' + device_replacement: '$1' + brand_replacement: 'Karbonn' + model_replacement: '$1' + - regex: '; *(A11|A39|A37|A34|ST8|ST10|ST7|Smart Tab3|Smart Tab2|Titanium S\d) +Build' + device_replacement: '$1' + brand_replacement: 'Karbonn' + model_replacement: '$1' + + ######### + # KDDI (Operator Branded Device) + # @ref: http://www.ipentec.com/document/document.aspx?page=android-useragent + ######### + - regex: '; *(IS01|IS03|IS05|IS\d{2}SH) Build' + device_replacement: '$1' + brand_replacement: 'Sharp' + model_replacement: '$1' + - regex: '; *(IS04) Build' + device_replacement: '$1' + brand_replacement: 'Regza' + model_replacement: '$1' + - regex: '; *(IS06|IS\d{2}PT) Build' + device_replacement: '$1' + brand_replacement: 'Pantech' + model_replacement: '$1' + - regex: '; *(IS11S) Build' + device_replacement: '$1' + brand_replacement: 'SonyEricsson' + model_replacement: 'Xperia Acro' + - regex: '; *(IS11CA) Build' + device_replacement: '$1' + brand_replacement: 'Casio' + model_replacement: 'GzOne $1' + - regex: '; *(IS11LG) Build' + device_replacement: '$1' + brand_replacement: 'LG' + model_replacement: 'Optimus X' + - regex: '; *(IS11N) Build' + device_replacement: '$1' + brand_replacement: 'Medias' + model_replacement: '$1' + - 
regex: '; *(IS11PT) Build' + device_replacement: '$1' + brand_replacement: 'Pantech' + model_replacement: 'MIRACH' + - regex: '; *(IS12F) Build' + device_replacement: '$1' + brand_replacement: 'Fujitsu' + model_replacement: 'Arrows ES' + # @ref: https://ja.wikipedia.org/wiki/IS12M + - regex: '; *(IS12M) Build' + device_replacement: '$1' + brand_replacement: 'Motorola' + model_replacement: 'XT909' + - regex: '; *(IS12S) Build' + device_replacement: '$1' + brand_replacement: 'SonyEricsson' + model_replacement: 'Xperia Acro HD' + - regex: '; *(ISW11F) Build' + device_replacement: '$1' + brand_replacement: 'Fujitsu' + model_replacement: 'Arrowz Z' + - regex: '; *(ISW11HT) Build' + device_replacement: '$1' + brand_replacement: 'HTC' + model_replacement: 'EVO' + - regex: '; *(ISW11K) Build' + device_replacement: '$1' + brand_replacement: 'Kyocera' + model_replacement: 'DIGNO' + - regex: '; *(ISW11M) Build' + device_replacement: '$1' + brand_replacement: 'Motorola' + model_replacement: 'Photon' + - regex: '; *(ISW11SC) Build' + device_replacement: '$1' + brand_replacement: 'Samsung' + model_replacement: 'GALAXY S II WiMAX' + - regex: '; *(ISW12HT) Build' + device_replacement: '$1' + brand_replacement: 'HTC' + model_replacement: 'EVO 3D' + - regex: '; *(ISW13HT) Build' + device_replacement: '$1' + brand_replacement: 'HTC' + model_replacement: 'J' + - regex: '; *(ISW?[0-9]{2}[A-Z]{0,2}) Build' + device_replacement: '$1' + brand_replacement: 'KDDI' + model_replacement: '$1' + - regex: '; *(INFOBAR [^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'KDDI' + model_replacement: '$1' + + ######### + # Kingcom + # @ref: http://www.e-kingcom.com + ######### + - regex: '; *(JOYPAD|Joypad)[ _]([^;/]+) Build/' + device_replacement: '$1 $2' + brand_replacement: 'Kingcom' + model_replacement: '$1 $2' + + ######### + # Kobo + # @ref: https://en.wikipedia.org/wiki/Kobo_Inc. + # @ref: http://www.kobo.com/devices#tablets + ######### + - regex: '; *(Vox|VOX|Arc|K080) Build/' + regex_flag: 'i' + device_replacement: '$1' + brand_replacement: 'Kobo' + model_replacement: '$1' + - regex: '\b(Kobo Touch)\b' + device_replacement: '$1' + brand_replacement: 'Kobo' + model_replacement: '$1' + + ######### + # K-Touch + # @ref: ?? + ######### + - regex: '; *(K-Touch)[ _]([^;/]+) Build' + regex_flag: 'i' + device_replacement: '$1 $2' + brand_replacement: 'Ktouch' + model_replacement: '$2' + + ######### + # KT Tech + # @ref: http://www.kttech.co.kr + ######### + - regex: '; *((?:EV|KM)-S\d+[A-Z]?) Build' + regex_flag: 'i' + device_replacement: '$1' + brand_replacement: 'KTtech' + model_replacement: '$1' + + ######### + # Kyocera + # @ref: http://www.android.com/devices/?country=all&m=kyocera + ######### + - regex: '; *(Zio|Hydro|Torque|Event|EVENT|Echo|Milano|Rise|URBANO PROGRESSO|WX04K|WX06K|WX10K|KYL21|101K|C5[12]\d{2}) Build/' + device_replacement: '$1' + brand_replacement: 'Kyocera' + model_replacement: '$1' + + ######### + # Lava + # @ref: http://www.lavamobiles.com/ + ######### + - regex: '; *(?:LAVA[ _])?IRIS[ _\-]?([^/;\)]+) *(?:;|\)|Build)' + regex_flag: 'i' + device_replacement: 'Iris $1' + brand_replacement: 'Lava' + model_replacement: 'Iris $1' + - regex: '; *LAVA[ _]([^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Lava' + model_replacement: '$1' + + ######### + # Lemon + # @ref: http://www.lemonmobiles.com/products.php?type=1 + ######### + - regex: '; *(?:(Aspire A1)|(?:LEMON|Lemon)[ _]([^;/]+))_? 
Build' + device_replacement: 'Lemon $1$2' + brand_replacement: 'Lemon' + model_replacement: '$1$2' + + ######### + # Lenco + # @ref: http://www.lenco.com/c/tablets/ + ######### + - regex: '; *(TAB-1012) Build/' + device_replacement: 'Lenco $1' + brand_replacement: 'Lenco' + model_replacement: '$1' + - regex: '; Lenco ([^;/]+) Build/' + device_replacement: 'Lenco $1' + brand_replacement: 'Lenco' + model_replacement: '$1' + + ######### + # Lenovo + # @ref: http://support.lenovo.com/en_GB/downloads/default.page?# + ######### + - regex: '; *(A1_07|A2107A-H|S2005A-H|S1-37AH0) Build' + device_replacement: '$1' + brand_replacement: 'Lenovo' + model_replacement: '$1' + - regex: '; *(Idea[Tp]ab)[ _]([^;/]+);? Build' + device_replacement: 'Lenovo $1 $2' + brand_replacement: 'Lenovo' + model_replacement: '$1 $2' + - regex: '; *(Idea(?:Tab|pad)) ?([^;/]+) Build' + device_replacement: 'Lenovo $1 $2' + brand_replacement: 'Lenovo' + model_replacement: '$1 $2' + - regex: '; *(ThinkPad) ?(Tablet) Build/' + device_replacement: 'Lenovo $1 $2' + brand_replacement: 'Lenovo' + model_replacement: '$1 $2' + - regex: '; *(?:LNV-)?(?:=?[Ll]enovo[ _\-]?|LENOVO[ _])+(.+?)(?:Build|[;/\)])' + device_replacement: 'Lenovo $1' + brand_replacement: 'Lenovo' + model_replacement: '$1' + - regex: '[;,] (?:Vodafone )?(SmartTab) ?(II) ?(\d+) Build/' + device_replacement: 'Lenovo $1 $2 $3' + brand_replacement: 'Lenovo' + model_replacement: '$1 $2 $3' + - regex: '; *(?:Ideapad )?K1 Build/' + device_replacement: 'Lenovo Ideapad K1' + brand_replacement: 'Lenovo' + model_replacement: 'Ideapad K1' + - regex: '; *(3GC101|3GW10[01]|A390) Build/' + device_replacement: '$1' + brand_replacement: 'Lenovo' + model_replacement: '$1' + - regex: '\b(?:Lenovo|LENOVO)+[ _\-]?([^,;:/ ]+)' + device_replacement: 'Lenovo $1' + brand_replacement: 'Lenovo' + model_replacement: '$1' + + ######### + # Lexibook + # @ref: http://www.lexibook.com/fr + ######### + - regex: '; *(MFC\d+)[A-Z]{2}([^;,/]*),? Build' + device_replacement: '$1$2' + brand_replacement: 'Lexibook' + model_replacement: '$1$2' + + ######### + # LG + # @ref: http://www.lg.com/uk/mobile + ######### + - regex: '; *(E[34][0-9]{2}|LS[6-8][0-9]{2}|VS[6-9][0-9]+[^;/]+|Nexus 4|Nexus 5X?|GT540f?|Optimus (?:2X|G|4X HD)|OptimusX4HD) *(?:Build|;)' + device_replacement: '$1' + brand_replacement: 'LG' + model_replacement: '$1' + - regex: '[;:] *(L-\d+[A-Z]|LGL\d+[A-Z]?)(?:/V\d+)? *(?:Build|[;\)])' + device_replacement: '$1' + brand_replacement: 'LG' + model_replacement: '$1' + - regex: '; *(LG-)([A-Z]{1,2}\d{2,}[^,;/\)\(]*?)(?:Build| V\d+|[,;/\)\(]|$)' + device_replacement: '$1$2' + brand_replacement: 'LG' + model_replacement: '$2' + - regex: '; *(LG[ \-]|LG)([^;/]+)[;/]? 
Build' + device_replacement: '$1$2' + brand_replacement: 'LG' + model_replacement: '$2' + - regex: '^(LG)-([^;/]+)/ Mozilla/.*; Android' + device_replacement: '$1 $2' + brand_replacement: 'LG' + model_replacement: '$2' + + ######### + # Malata + # @ref: http://www.malata.com/en/products.aspx?classid=680 + ######### + - regex: '; *((?:SMB|smb)[^;/]+) Build/' + device_replacement: '$1' + brand_replacement: 'Malata' + model_replacement: '$1' + - regex: '; *(?:Malata|MALATA) ([^;/]+) Build/' + device_replacement: '$1' + brand_replacement: 'Malata' + model_replacement: '$1' + + ######### + # Manta + # @ref: http://www.manta.com.pl/en + ######### + - regex: '; *(MS[45][0-9]{3}|MID0[568][NS]?|MID[1-9]|MID[78]0[1-9]|MID970[1-9]|MID100[1-9]) Build/' + device_replacement: '$1' + brand_replacement: 'Manta' + model_replacement: '$1' + + ######### + # Match + # @ref: http://www.match.net.cn/products.asp + ######### + - regex: '; *(M1052|M806|M9000|M9100|M9701|MID100|MID120|MID125|MID130|MID135|MID140|MID701|MID710|MID713|MID727|MID728|MID731|MID732|MID733|MID735|MID736|MID737|MID760|MID800|MID810|MID820|MID830|MID833|MID835|MID860|MID900|MID930|MID933|MID960|MID980) Build/' + device_replacement: '$1' + brand_replacement: 'Match' + model_replacement: '$1' + + ######### + # Maxx + # @ref: http://www.maxxmobile.in/ + # @models: Maxx MSD7-Play, Maxx MX245+ Trance, Maxx AX8 Race, Maxx MSD7 3G- AX50, Maxx Genx Droid 7 - AX40, Maxx AX5 Duo, + # Maxx AX3 Duo, Maxx AX3, Maxx AX8 Note II (Note 2), Maxx AX8 Note I, Maxx AX8, Maxx AX5 Plus, Maxx MSD7 Smarty, + # Maxx AX9Z Race, + # Maxx MT150, Maxx MQ601, Maxx M2020, Maxx Sleek MX463neo, Maxx MX525, Maxx MX192-Tune, Maxx Genx Droid 7 AX353, + # @note: Need more User-Agents!!! + ######### + - regex: '; *(GenxDroid7|MSD7.*|AX\d.*|Tab 701|Tab 722) Build/' + device_replacement: 'Maxx $1' + brand_replacement: 'Maxx' + model_replacement: '$1' + + ######### + # Mediacom + # @ref: http://www.mediacomeurope.it/ + ######### + - regex: '; *(M-PP[^;/]+|PhonePad ?\d{2,}[^;/]+) Build' + device_replacement: 'Mediacom $1' + brand_replacement: 'Mediacom' + model_replacement: '$1' + - regex: '; *(M-MP[^;/]+|SmartPad ?\d{2,}[^;/]+) Build' + device_replacement: 'Mediacom $1' + brand_replacement: 'Mediacom' + model_replacement: '$1' + + ######### + # Medion + # @ref: http://www.medion.com/en/ + ######### + - regex: '; *(?:MD_)?LIFETAB[ _]([^;/]+) Build' + regex_flag: 'i' + device_replacement: 'Medion Lifetab $1' + brand_replacement: 'Medion' + model_replacement: 'Lifetab $1' + - regex: '; *MEDION ([^;/]+) Build' + device_replacement: 'Medion $1' + brand_replacement: 'Medion' + model_replacement: '$1' + + ######### + # Meizu + # @ref: http://www.meizu.com + ######### + - regex: '; *(M030|M031|M035|M040|M065|m9) Build' + device_replacement: 'Meizu $1' + brand_replacement: 'Meizu' + model_replacement: '$1' + - regex: '; *(?:meizu_|MEIZU )(.+?) *(?:Build|[;\)])' + device_replacement: 'Meizu $1' + brand_replacement: 'Meizu' + model_replacement: '$1' + + ######### + # Micromax + # @ref: http://www.micromaxinfo.com + ######### + - regex: '; *(?:Micromax[ _](A111|A240)|(A111|A240)) Build' + regex_flag: 'i' + device_replacement: 'Micromax $1$2' + brand_replacement: 'Micromax' + model_replacement: '$1$2' + - regex: '; *Micromax[ _](A\d{2,3}[^;/]*) Build' + regex_flag: 'i' + device_replacement: 'Micromax $1' + brand_replacement: 'Micromax' + model_replacement: '$1' + # be careful here with Acer, e.g. 
A500 + - regex: '; *(A\d{2}|A[12]\d{2}|A90S|A110Q) Build' + regex_flag: 'i' + device_replacement: 'Micromax $1' + brand_replacement: 'Micromax' + model_replacement: '$1' + - regex: '; *Micromax[ _](P\d{3}[^;/]*) Build' + regex_flag: 'i' + device_replacement: 'Micromax $1' + brand_replacement: 'Micromax' + model_replacement: '$1' + - regex: '; *(P\d{3}|P\d{3}\(Funbook\)) Build' + regex_flag: 'i' + device_replacement: 'Micromax $1' + brand_replacement: 'Micromax' + model_replacement: '$1' + + ######### + # Mito + # @ref: http://new.mitomobile.com/ + ######### + - regex: '; *(MITO)[ _\-]?([^;/]+) Build' + regex_flag: 'i' + device_replacement: '$1 $2' + brand_replacement: 'Mito' + model_replacement: '$2' + + ######### + # Mobistel + # @ref: http://www.mobistel.com/ + ######### + - regex: '; *(Cynus)[ _](F5|T\d|.+?) *(?:Build|[;/\)])' + regex_flag: 'i' + device_replacement: '$1 $2' + brand_replacement: 'Mobistel' + model_replacement: '$1 $2' + + ######### + # Modecom + # @ref: http://www.modecom.eu/tablets/portal/ + ######### + - regex: '; *(MODECOM )?(FreeTab) ?([^;/]+) Build' + regex_flag: 'i' + device_replacement: '$1$2 $3' + brand_replacement: 'Modecom' + model_replacement: '$2 $3' + - regex: '; *(MODECOM )([^;/]+) Build' + regex_flag: 'i' + device_replacement: '$1 $2' + brand_replacement: 'Modecom' + model_replacement: '$2' + + ######### + # Motorola + # @ref: http://www.motorola.com/us/shop-all-mobile-phones/ + ######### + - regex: '; *(MZ\d{3}\+?|MZ\d{3} 4G|Xoom|XOOM[^;/]*) Build' + device_replacement: 'Motorola $1' + brand_replacement: 'Motorola' + model_replacement: '$1' + - regex: '; *(Milestone )(XT[^;/]*) Build' + device_replacement: 'Motorola $1$2' + brand_replacement: 'Motorola' + model_replacement: '$2' + - regex: '; *(Motoroi ?x|Droid X|DROIDX) Build' + regex_flag: 'i' + device_replacement: 'Motorola $1' + brand_replacement: 'Motorola' + model_replacement: 'DROID X' + - regex: '; *(Droid[^;/]*|DROID[^;/]*|Milestone[^;/]*|Photon|Triumph|Devour|Titanium) Build' + device_replacement: 'Motorola $1' + brand_replacement: 'Motorola' + model_replacement: '$1' + - regex: '; *(A555|A85[34][^;/]*|A95[356]|ME[58]\d{2}\+?|ME600|ME632|ME722|MB\d{3}\+?|MT680|MT710|MT870|MT887|MT917|WX435|WX453|WX44[25]|XT\d{3,4}[A-Z\+]*|CL[iI]Q|CL[iI]Q XT) Build' + device_replacement: '$1' + brand_replacement: 'Motorola' + model_replacement: '$1' + - regex: '; *(Motorola MOT-|Motorola[ _\-]|MOT\-?)([^;/]+) Build' + device_replacement: '$1$2' + brand_replacement: 'Motorola' + model_replacement: '$2' + - regex: '; *(Moto[_ ]?|MOT\-)([^;/]+) Build' + device_replacement: '$1$2' + brand_replacement: 'Motorola' + model_replacement: '$2' + + ######### + # MpMan + # @ref: http://www.mpmaneurope.com + ######### + - regex: '; *((?:MP[DQ]C|MPG\d{1,4}|MP\d{3,4}|MID(?:(?:10[234]|114|43|7[247]|8[24]|7)C|8[01]1))[^;/]*) Build' + device_replacement: '$1' + brand_replacement: 'Mpman' + model_replacement: '$1' + + ######### + # MSI + # @ref: http://www.msi.com/product/windpad/ + ######### + - regex: '; *(?:MSI[ _])?(Primo\d+|Enjoy[ _\-][^;/]+) Build' + regex_flag: 'i' + device_replacement: '$1' + brand_replacement: 'Msi' + model_replacement: '$1' + + ######### + # Multilaser + # http://www.multilaser.com.br/listagem_produtos.php?cat=5 + ######### + - regex: '; *Multilaser[ _]([^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Multilaser' + model_replacement: '$1' + + ######### + # MyPhone + # @ref: http://myphone.com.ph/ + ######### + - regex: '; *(My)[_]?(Pad)[ _]([^;/]+) Build' + device_replacement: '$1$2 $3' + 
brand_replacement: 'MyPhone' + model_replacement: '$1$2 $3' + - regex: '; *(My)\|?(Phone)[ _]([^;/]+) Build' + device_replacement: '$1$2 $3' + brand_replacement: 'MyPhone' + model_replacement: '$3' + - regex: '; *(A\d+)[ _](Duo)? Build' + regex_flag: 'i' + device_replacement: '$1 $2' + brand_replacement: 'MyPhone' + model_replacement: '$1 $2' + + ######### + # Mytab + # @ref: http://www.mytab.eu/en/category/mytab-products/ + ######### + - regex: '; *(myTab[^;/]*) Build' + device_replacement: '$1' + brand_replacement: 'Mytab' + model_replacement: '$1' + + ######### + # Nabi + # @ref: https://www.nabitablet.com + ######### + - regex: '; *(NABI2?-)([^;/]+) Build/' + device_replacement: '$1$2' + brand_replacement: 'Nabi' + model_replacement: '$2' + + ######### + # Nec Medias + # @ref: http://www.n-keitai.com/ + ######### + - regex: '; *(N-\d+[CDE]) Build/' + device_replacement: '$1' + brand_replacement: 'Nec' + model_replacement: '$1' + - regex: '; ?(NEC-)(.*) Build/' + device_replacement: '$1$2' + brand_replacement: 'Nec' + model_replacement: '$2' + - regex: '; *(LT-NA7) Build/' + device_replacement: '$1' + brand_replacement: 'Nec' + model_replacement: 'Lifetouch Note' + + ######### + # Nextbook + # @ref: http://nextbookusa.com + ######### + - regex: '; *(NXM\d+[A-z0-9_]*|Next\d[A-z0-9_ \-]*|NEXT\d[A-z0-9_ \-]*|Nextbook [A-z0-9_ ]*|DATAM803HC|M805)(?: Build|[\);])' + device_replacement: '$1' + brand_replacement: 'Nextbook' + model_replacement: '$1' + + ######### + # Nokia + # @ref: http://www.nokia.com + ######### + - regex: '; *(Nokia)([ _\-]*)([^;/]*) Build' + regex_flag: 'i' + device_replacement: '$1$2$3' + brand_replacement: 'Nokia' + model_replacement: '$3' + + ######### + # Nook + # @ref: + # TODO nook browser/1.0 + ######### + - regex: '; *(Nook ?|Barnes & Noble Nook |BN )([^;/]+) Build' + device_replacement: '$1$2' + brand_replacement: 'Nook' + model_replacement: '$2' + - regex: '; *(NOOK )?(BNRV200|BNRV200A|BNTV250|BNTV250A|BNTV400|BNTV600|LogicPD Zoom2) Build' + device_replacement: '$1$2' + brand_replacement: 'Nook' + model_replacement: '$2' + - regex: '; Build/(Nook)' + device_replacement: '$1' + brand_replacement: 'Nook' + model_replacement: 'Tablet' + + ######### + # Olivetti + # @ref: http://www.olivetti.de/EN/Page/t02/view_html?idp=348 + ######### + - regex: '; *(OP110|OliPad[^;/]+) Build' + device_replacement: 'Olivetti $1' + brand_replacement: 'Olivetti' + model_replacement: '$1' + + ######### + # Omega + # @ref: http://omega-technology.eu/en/produkty/346/tablets + # @note: MID tablets might get matched by CobyKyros first + # @models: (T107|MID(?:700[2-5]|7031|7108|7132|750[02]|8001|8500|9001|971[12]) + ######### + - regex: '; *OMEGA[ _\-](MID[^;/]+) Build' + device_replacement: 'Omega $1' + brand_replacement: 'Omega' + model_replacement: '$1' + - regex: '^(MID7500|MID\d+) Mozilla/5\.0 \(iPad;' + device_replacement: 'Omega $1' + brand_replacement: 'Omega' + model_replacement: '$1' + + ######### + # OpenPeak + # @ref: https://support.google.com/googleplay/answer/1727131?hl=en + ######### + - regex: '; *((?:CIUS|cius)[^;/]*) Build' + device_replacement: 'Openpeak $1' + brand_replacement: 'Openpeak' + model_replacement: '$1' + + ######### + # Oppo + # @ref: http://en.oppo.com/products/ + ######### + - regex: '; *(Find ?(?:5|7a)|R8[012]\d{1,2}|T703\d{0,1}|U70\d{1,2}T?|X90\d{1,2}) Build' + device_replacement: 'Oppo $1' + brand_replacement: 'Oppo' + model_replacement: '$1' + - regex: '; *OPPO ?([^;/]+) Build/' + device_replacement: 'Oppo $1' + brand_replacement: 'Oppo' + 
model_replacement: '$1' + + ######### + # Odys + # @ref: http://odys.de + ######### + - regex: '; *(?:Odys\-|ODYS\-|ODYS )([^;/]+) Build' + device_replacement: 'Odys $1' + brand_replacement: 'Odys' + model_replacement: '$1' + - regex: '; *(SELECT) ?(7) Build' + device_replacement: 'Odys $1 $2' + brand_replacement: 'Odys' + model_replacement: '$1 $2' + - regex: '; *(PEDI)_(PLUS)_(W) Build' + device_replacement: 'Odys $1 $2 $3' + brand_replacement: 'Odys' + model_replacement: '$1 $2 $3' + # Weltbild - Tablet PC 4 = Cat Phoenix = Odys Tablet PC 4? + - regex: '; *(AEON|BRAVIO|FUSION|FUSION2IN1|Genio|EOS10|IEOS[^;/]*|IRON|Loox|LOOX|LOOX Plus|Motion|NOON|NOON_PRO|NEXT|OPOS|PEDI[^;/]*|PRIME[^;/]*|STUDYTAB|TABLO|Tablet-PC-4|UNO_X8|XELIO[^;/]*|Xelio ?\d+ ?[Pp]ro|XENO10|XPRESS PRO) Build' + device_replacement: 'Odys $1' + brand_replacement: 'Odys' + model_replacement: '$1' + + ######### + # Orion + # @ref: http://www.orion.ua/en/products/computer-products/tablet-pcs.html + ######### + - regex: '; *(TP-\d+) Build/' + device_replacement: 'Orion $1' + brand_replacement: 'Orion' + model_replacement: '$1' + + ######### + # PackardBell + # @ref: http://www.packardbell.com/pb/en/AE/content/productgroup/tablets + ######### + - regex: '; *(G100W?) Build/' + device_replacement: 'PackardBell $1' + brand_replacement: 'PackardBell' + model_replacement: '$1' + + ######### + # Panasonic + # @ref: http://panasonic.jp/mobile/ + # @models: T11, T21, T31, P11, P51, Eluga Power, Eluga DL1 + # @models: (tab) Toughpad FZ-A1, Toughpad JT-B1 + ######### + - regex: '; *(Panasonic)[_ ]([^;/]+) Build' + device_replacement: '$1 $2' + brand_replacement: '$1' + model_replacement: '$2' + # Toughpad + - regex: '; *(FZ-A1B|JT-B1) Build' + device_replacement: 'Panasonic $1' + brand_replacement: 'Panasonic' + model_replacement: '$1' + # Eluga Power + - regex: '; *(dL1|DL1) Build' + device_replacement: 'Panasonic $1' + brand_replacement: 'Panasonic' + model_replacement: '$1' + + ######### + # Pantech + # @href: http://www.pantech.co.kr/en/prod/prodList.do?gbrand=PANTECH + # @href: http://www.pantech.co.kr/en/prod/prodList.do?gbrand=VEGA + # @models: ADR8995, ADR910L, ADR930VW, C790, CDM8992, CDM8999, IS06, IS11PT, P2000, P2020, P2030, P4100, P5000, P6010, P6020, P6030, P7000, P7040, P8000, P8010, P9020, P9050, P9060, P9070, P9090, PT001, PT002, PT003, TXT8040, TXT8045, VEGA PTL21 + ######### + - regex: '; *(SKY[ _])?(IM\-[AT]\d{3}[^;/]+).* Build/' + device_replacement: 'Pantech $1$2' + brand_replacement: 'Pantech' + model_replacement: '$1$2' + - regex: '; *((?:ADR8995|ADR910L|ADR930L|ADR930VW|PTL21|P8000)(?: 4G)?) 
Build/' + device_replacement: '$1' + brand_replacement: 'Pantech' + model_replacement: '$1' + - regex: '; *Pantech([^;/]+).* Build/' + device_replacement: 'Pantech $1' + brand_replacement: 'Pantech' + model_replacement: '$1' + + ######### + # Papayre + # @ref: http://grammata.es/ + ######### + - regex: '; *(papyre)[ _\-]([^;/]+) Build/' + regex_flag: 'i' + device_replacement: '$1 $2' + brand_replacement: 'Papyre' + model_replacement: '$2' + + ######### + # Pearl + # @ref: http://www.pearl.de/c-1540.shtml + ######### + - regex: '; *(?:Touchlet )?(X10\.[^;/]+) Build/' + device_replacement: 'Pearl $1' + brand_replacement: 'Pearl' + model_replacement: '$1' + + ######### + # Phicomm + # @ref: http://www.phicomm.com.cn/ + ######### + - regex: '; PHICOMM (i800) Build/' + device_replacement: 'Phicomm $1' + brand_replacement: 'Phicomm' + model_replacement: '$1' + - regex: '; PHICOMM ([^;/]+) Build/' + device_replacement: 'Phicomm $1' + brand_replacement: 'Phicomm' + model_replacement: '$1' + - regex: '; *(FWS\d{3}[^;/]+) Build/' + device_replacement: 'Phicomm $1' + brand_replacement: 'Phicomm' + model_replacement: '$1' + + ######### + # Philips + # @ref: http://www.support.philips.com/support/catalog/products.jsp?_dyncharset=UTF-8&country=&categoryid=MOBILE_PHONES_SMART_SU_CN_CARE&userLanguage=en&navCount=2&groupId=PC_PRODUCTS_AND_PHONES_GR_CN_CARE&catalogType=&navAction=push&userCountry=cn&title=Smartphones&cateId=MOBILE_PHONES_CA_CN_CARE + # @TODO: Philips Tablets User-Agents missing! + # @ref: http://www.support.philips.com/support/catalog/products.jsp?_dyncharset=UTF-8&country=&categoryid=ENTERTAINMENT_TABLETS_SU_CN_CARE&userLanguage=en&navCount=0&groupId=&catalogType=&navAction=push&userCountry=cn&title=Entertainment+Tablets&cateId=TABLETS_CA_CN_CARE + ######### + # @note: this is a best guess based on the available Philips models. Need more User-Agents + - regex: '; *(D633|D822|D833|T539|T939|V726|W335|W336|W337|W3568|W536|W5510|W626|W632|W6350|W6360|W6500|W732|W736|W737|W7376|W820|W832|W8355|W8500|W8510|W930) Build' + device_replacement: '$1' + brand_replacement: 'Philips' + model_replacement: '$1' + - regex: '; *(?:Philips|PHILIPS)[ _]([^;/]+) Build' + device_replacement: 'Philips $1' + brand_replacement: 'Philips' + model_replacement: '$1' + + ######### + # Pipo + # @ref: http://www.pipo.cn/En/ + ######### + - regex: 'Android 4\..*; *(M[12356789]|U[12368]|S[123])\ ?(pro)? Build' + device_replacement: 'Pipo $1$2' + brand_replacement: 'Pipo' + model_replacement: '$1$2' + + ######### + # Ployer + # @ref: http://en.ployer.cn/ + ######### + - regex: '; *(MOMO[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Ployer' + model_replacement: '$1' + + ######### + # Polaroid/ Acho + # @ref: http://polaroidstore.com/store/start.asp?category_id=382&category_id2=0&order=title&filter1=&filter2=&filter3=&view=all + ######### + - regex: '; *(?:Polaroid[ _])?((?:MIDC\d{3,}|PMID\d{2,}|PTAB\d{3,})[^;/]*)(\/[^;/]*)? Build/' + device_replacement: '$1' + brand_replacement: 'Polaroid' + model_replacement: '$1' + - regex: '; *(?:Polaroid )(Tablet) Build/' + device_replacement: '$1' + brand_replacement: 'Polaroid' + model_replacement: '$1' + + ######### + # Pomp + # @ref: http://pompmobileshop.com/ + ######### + #~ TODO + - regex: '; *(POMP)[ _\-](.+?) 
*(?:Build|[;/\)])' + device_replacement: '$1 $2' + brand_replacement: 'Pomp' + model_replacement: '$2' + + ######### + # Positivo + # @ref: http://www.positivoinformatica.com.br/www/pessoal/tablet-ypy/ + ######### + - regex: '; *(TB07STA|TB10STA|TB07FTA|TB10FTA) Build/' + device_replacement: '$1' + brand_replacement: 'Positivo' + model_replacement: '$1' + - regex: '; *(?:Positivo )?((?:YPY|Ypy)[^;/]+) Build/' + device_replacement: '$1' + brand_replacement: 'Positivo' + model_replacement: '$1' + + ######### + # POV + # @ref: http://www.pointofview-online.com/default2.php + # @TODO: Smartphone Models MOB-3515, MOB-5045-B missing + ######### + - regex: '; *(MOB-[^;/]+) Build/' + device_replacement: '$1' + brand_replacement: 'POV' + model_replacement: '$1' + - regex: '; *POV[ _\-]([^;/]+) Build/' + device_replacement: 'POV $1' + brand_replacement: 'POV' + model_replacement: '$1' + - regex: '; *((?:TAB-PLAYTAB|TAB-PROTAB|PROTAB|PlayTabPro|Mobii[ _\-]|TAB-P)[^;/]*) Build/' + device_replacement: 'POV $1' + brand_replacement: 'POV' + model_replacement: '$1' + + ######### + # Prestigio + # @ref: http://www.prestigio.com/catalogue/MultiPhones + # @ref: http://www.prestigio.com/catalogue/MultiPads + ######### + - regex: '; *(?:Prestigio )?((?:PAP|PMP)\d[^;/]+) Build/' + device_replacement: 'Prestigio $1' + brand_replacement: 'Prestigio' + model_replacement: '$1' + + ######### + # Proscan + # @ref: http://www.proscanvideo.com/products-search.asp?itemClass=TABLET&itemnmbr= + ######### + - regex: '; *(PLT[0-9]{4}.*) Build/' + device_replacement: '$1' + brand_replacement: 'Proscan' + model_replacement: '$1' + + ######### + # QMobile + # @ref: http://www.qmobile.com.pk/ + ######### + - regex: '; *(A2|A5|A8|A900)_?(Classic)? Build' + device_replacement: '$1 $2' + brand_replacement: 'Qmobile' + model_replacement: '$1 $2' + - regex: '; *(Q[Mm]obile)_([^_]+)_([^_]+) Build' + device_replacement: 'Qmobile $2 $3' + brand_replacement: 'Qmobile' + model_replacement: '$2 $3' + - regex: '; *(Q\-?[Mm]obile)[_ ](A[^;/]+) Build' + device_replacement: 'Qmobile $2' + brand_replacement: 'Qmobile' + model_replacement: '$2' + + ######### + # Qmobilevn + # @ref: http://qmobile.vn/san-pham.html + ######### + - regex: '; *(Q\-Smart)[ _]([^;/]+) Build/' + device_replacement: '$1 $2' + brand_replacement: 'Qmobilevn' + model_replacement: '$2' + - regex: '; *(Q\-?[Mm]obile)[ _\-](S[^;/]+) Build/' + device_replacement: '$1 $2' + brand_replacement: 'Qmobilevn' + model_replacement: '$2' + + ######### + # Quanta + # @ref: ? + ######### + - regex: '; *(TA1013) Build' + device_replacement: '$1' + brand_replacement: 'Quanta' + model_replacement: '$1' + + ######### + # Rockchip + # @ref: http://www.rock-chips.com/a/cn/product/index.html + # @note: manufacturer sells chipsets - I assume that these UAs are dev-boards + ######### + - regex: '; *(RK\d+),? Build/' + device_replacement: '$1' + brand_replacement: 'Rockchip' + model_replacement: '$1' + - regex: ' Build/(RK\d+)' + device_replacement: '$1' + brand_replacement: 'Rockchip' + model_replacement: '$1' + + ######### + # Samsung Android Devices + # @ref: http://www.samsung.com/us/mobile/cell-phones/all-products + ######### + - regex: '; *(SAMSUNG |Samsung )?((?:Galaxy (?:Note II|S\d)|GT-I9082|GT-I9205|GT-N7\d{3}|SM-N9005)[^;/]*)\/?[^;/]* Build/' + device_replacement: 'Samsung $1$2' + brand_replacement: 'Samsung' + model_replacement: '$2' + - regex: '; *(Google )?(Nexus [Ss](?: 4G)?) 
Build/' + device_replacement: 'Samsung $1$2' + brand_replacement: 'Samsung' + model_replacement: '$2' + - regex: '; *(SAMSUNG |Samsung )([^\/]*)\/[^ ]* Build/' + device_replacement: 'Samsung $2' + brand_replacement: 'Samsung' + model_replacement: '$2' + - regex: '; *(Galaxy(?: Ace| Nexus| S ?II+|Nexus S| with MCR 1.2| Mini Plus 4G)?) Build/' + device_replacement: 'Samsung $1' + brand_replacement: 'Samsung' + model_replacement: '$1' + - regex: '; *(SAMSUNG[ _\-] *)+([^;/]+) Build' + device_replacement: 'Samsung $2' + brand_replacement: 'Samsung' + model_replacement: '$2' + - regex: '; *(SAMSUNG-)?(GT\-[BINPS]\d{4}[^\/]*)(\/[^ ]*) Build' + device_replacement: 'Samsung $1$2$3' + brand_replacement: 'Samsung' + model_replacement: '$2' + - regex: '(?:; *|^)((?:GT\-[BIiNPS]\d{4}|I9\d{2}0[A-Za-z\+]?\b)[^;/\)]*?)(?:Build|Linux|MIUI|[;/\)])' + device_replacement: 'Samsung $1' + brand_replacement: 'Samsung' + model_replacement: '$1' + - regex: '; (SAMSUNG-)([A-Za-z0-9\-]+).* Build/' + device_replacement: 'Samsung $1$2' + brand_replacement: 'Samsung' + model_replacement: '$2' + - regex: '; *((?:SCH|SGH|SHV|SHW|SPH|SC|SM)\-[A-Za-z0-9 ]+)(/?[^ ]*)? Build' + device_replacement: 'Samsung $1' + brand_replacement: 'Samsung' + model_replacement: '$1' + - regex: ' ((?:SCH)\-[A-Za-z0-9 ]+)(/?[^ ]*)? Build' + device_replacement: 'Samsung $1' + brand_replacement: 'Samsung' + model_replacement: '$1' + - regex: '; *(Behold ?(?:2|II)|YP\-G[^;/]+|EK-GC100|SCL21|I9300) Build' + device_replacement: 'Samsung $1' + brand_replacement: 'Samsung' + model_replacement: '$1' + + ######### + # Sharp + # @ref: http://www.sharp-phone.com/en/index.html + # @ref: http://www.android.com/devices/?country=all&m=sharp + ######### + - regex: '; *(SH\-?\d\d[^;/]+|SBM\d[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Sharp' + model_replacement: '$1' + - regex: '; *(SHARP[ -])([^;/]+) Build' + device_replacement: '$1$2' + brand_replacement: 'Sharp' + model_replacement: '$2' + + ######### + # Simvalley + # @ref: http://www.simvalley-mobile.de/ + ######### + - regex: '; *(SPX[_\-]\d[^;/]*) Build/' + device_replacement: '$1' + brand_replacement: 'Simvalley' + model_replacement: '$1' + - regex: '; *(SX7\-PEARL\.GmbH) Build/' + device_replacement: '$1' + brand_replacement: 'Simvalley' + model_replacement: '$1' + - regex: '; *(SP[T]?\-\d{2}[^;/]*) Build/' + device_replacement: '$1' + brand_replacement: 'Simvalley' + model_replacement: '$1' + + ######### + # SK Telesys + # @ref: http://www.sk-w.com/phone/phone_list.jsp + # @ref: http://www.android.com/devices/?country=all&m=sk-telesys + ######### + - regex: '; *(SK\-.*) Build/' + device_replacement: '$1' + brand_replacement: 'SKtelesys' + model_replacement: '$1' + + ######### + # Skytex + # @ref: http://skytex.com/android + ######### + - regex: '; *(?:SKYTEX|SX)-([^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Skytex' + model_replacement: '$1' + - regex: '; *(IMAGINE [^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Skytex' + model_replacement: '$1' + + ######### + # SmartQ + # @ref: http://en.smartdevices.com.cn/Products/ + # @models: Z8, X7, U7H, U7, T30, T20, Ten3, V5-II, T7-3G, SmartQ5, K7, S7, Q8, T19, Ten2, Ten, R10, T7, R7, V5, V7, SmartQ7 + ######### + - regex: '; *(SmartQ) ?([^;/]+) Build/' + device_replacement: '$1 $2' + brand_replacement: '$1' + model_replacement: '$2' + + ######### + # Smartbitt + # @ref: http://www.smartbitt.com/ + # @missing: SBT Useragents + ######### + - regex: '; *(WF7C|WF10C|SBT[^;/]+) Build' + device_replacement: 
'$1' + brand_replacement: 'Smartbitt' + model_replacement: '$1' + + ######### + # Softbank (Operator Branded Devices) + # @ref: http://www.ipentec.com/document/document.aspx?page=android-useragent + ######### + - regex: '; *(SBM(?:003SH|005SH|006SH|007SH|102SH)) Build' + device_replacement: '$1' + brand_replacement: 'Sharp' + model_replacement: '$1' + - regex: '; *(003P|101P|101P11C|102P) Build' + device_replacement: '$1' + brand_replacement: 'Panasonic' + model_replacement: '$1' + - regex: '; *(00\dZ) Build/' + device_replacement: '$1' + brand_replacement: 'ZTE' + model_replacement: '$1' + - regex: '; HTC(X06HT) Build' + device_replacement: '$1' + brand_replacement: 'HTC' + model_replacement: '$1' + - regex: '; *(001HT|X06HT) Build' + device_replacement: '$1' + brand_replacement: 'HTC' + model_replacement: '$1' + - regex: '; *(201M) Build' + device_replacement: '$1' + brand_replacement: 'Motorola' + model_replacement: 'XT902' + + ######### + # Trekstor + # @ref: http://www.trekstor.co.uk/surftabs-en.html + # @note: Must come before SonyEricsson + ######### + - regex: '; *(ST\d{4}.*)Build/ST' + device_replacement: 'Trekstor $1' + brand_replacement: 'Trekstor' + model_replacement: '$1' + - regex: '; *(ST\d{4}.*) Build/' + device_replacement: 'Trekstor $1' + brand_replacement: 'Trekstor' + model_replacement: '$1' + + ######### + # SonyEricsson + # @note: Must come before nokia since they also use symbian + # @ref: http://www.android.com/devices/?country=all&m=sony-ericssons + # @TODO: type! + ######### + # android matchers + - regex: '; *(Sony ?Ericsson ?)([^;/]+) Build' + device_replacement: '$1$2' + brand_replacement: 'SonyEricsson' + model_replacement: '$2' + - regex: '; *((?:SK|ST|E|X|LT|MK|MT|WT)\d{2}[a-z0-9]*(?:-o)?|R800i|U20i) Build' + device_replacement: '$1' + brand_replacement: 'SonyEricsson' + model_replacement: '$1' + # TODO X\d+ is wrong + - regex: '; *(Xperia (?:A8|Arc|Acro|Active|Live with Walkman|Mini|Neo|Play|Pro|Ray|X\d+)[^;/]*) Build' + regex_flag: 'i' + device_replacement: '$1' + brand_replacement: 'SonyEricsson' + model_replacement: '$1' + + ######### + # Sony + # @ref: http://www.sonymobile.co.jp/index.html + # @ref: http://www.sonymobile.com/global-en/products/phones/ + # @ref: http://www.sony.jp/tablet/ + ######### + - regex: '; Sony (Tablet[^;/]+) Build' + device_replacement: 'Sony $1' + brand_replacement: 'Sony' + model_replacement: '$1' + - regex: '; Sony ([^;/]+) Build' + device_replacement: 'Sony $1' + brand_replacement: 'Sony' + model_replacement: '$1' + - regex: '; *(Sony)([A-Za-z0-9\-]+) Build' + device_replacement: '$1 $2' + brand_replacement: '$1' + model_replacement: '$2' + - regex: '; *(Xperia [^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Sony' + model_replacement: '$1' + - regex: '; *(C(?:1[0-9]|2[0-9]|53|55|6[0-9])[0-9]{2}|D[25]\d{3}|D6[56]\d{2}) Build' + device_replacement: '$1' + brand_replacement: 'Sony' + model_replacement: '$1' + - regex: '; *(SGP\d{3}|SGPT\d{2}) Build' + device_replacement: '$1' + brand_replacement: 'Sony' + model_replacement: '$1' + - regex: '; *(NW-Z1000Series) Build' + device_replacement: '$1' + brand_replacement: 'Sony' + model_replacement: '$1' + + ########## + # Sony PlayStation + # @ref: http://playstation.com + # The Vita spoofs the Kindle + ########## + - regex: 'PLAYSTATION 3' + device_replacement: 'PlayStation 3' + brand_replacement: 'Sony' + model_replacement: 'PlayStation 3' + - regex: '(PlayStation (?:Portable|Vita|\d+))' + device_replacement: '$1' + brand_replacement: 'Sony' + model_replacement: 
'$1' + + ######### + # Spice + # @ref: http://www.spicemobilephones.co.in/ + ######### + - regex: '; *((?:CSL_Spice|Spice|SPICE|CSL)[ _\-]?)?([Mm][Ii])([ _\-])?(\d{3}[^;/]*) Build/' + device_replacement: '$1$2$3$4' + brand_replacement: 'Spice' + model_replacement: 'Mi$4' + + ######### + # Sprint (Operator Branded Devices) + # @ref: + ######### + - regex: '; *(Sprint )(.+?) *(?:Build|[;/])' + device_replacement: '$1$2' + brand_replacement: 'Sprint' + model_replacement: '$2' + - regex: '\b(Sprint)[: ]([^;,/ ]+)' + device_replacement: '$1$2' + brand_replacement: 'Sprint' + model_replacement: '$2' + + ######### + # Tagi + # @ref: ?? + ######### + - regex: '; *(TAGI[ ]?)(MID) ?([^;/]+) Build/' + device_replacement: '$1$2$3' + brand_replacement: 'Tagi' + model_replacement: '$2$3' + + ######### + # Tecmobile + # @ref: http://www.tecmobile.com/ + ######### + - regex: '; *(Oyster500|Opal 800) Build' + device_replacement: 'Tecmobile $1' + brand_replacement: 'Tecmobile' + model_replacement: '$1' + + ######### + # Tecno + # @ref: www.tecno-mobile.com/ + ######### + - regex: '; *(TECNO[ _])([^;/]+) Build/' + device_replacement: '$1$2' + brand_replacement: 'Tecno' + model_replacement: '$2' + + ######### + # Telechips, Techvision evaluation boards + # @ref: + ######### + - regex: '; *Android for (Telechips|Techvision) ([^ ]+) ' + regex_flag: 'i' + device_replacement: '$1 $2' + brand_replacement: '$1' + model_replacement: '$2' + + ######### + # Telstra + # @ref: http://www.telstra.com.au/home-phone/thub-2/ + # @ref: https://support.google.com/googleplay/answer/1727131?hl=en + ######### + - regex: '; *(T-Hub2) Build/' + device_replacement: '$1' + brand_replacement: 'Telstra' + model_replacement: '$1' + + ######### + # Terra + # @ref: http://www.wortmann.de/ + ######### + - regex: '; *(PAD) ?(100[12]) Build/' + device_replacement: 'Terra $1$2' + brand_replacement: 'Terra' + model_replacement: '$1$2' + + ######### + # Texet + # @ref: http://www.texet.ru/tablet/ + ######### + - regex: '; *(T[BM]-\d{3}[^;/]+) Build/' + device_replacement: '$1' + brand_replacement: 'Texet' + model_replacement: '$1' + + ######### + # Thalia + # @ref: http://www.thalia.de/shop/tolino-shine-ereader/show/ + ######### + - regex: '; *(tolino [^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Thalia' + model_replacement: '$1' + - regex: '; *Build/.* (TOLINO_BROWSER)' + device_replacement: '$1' + brand_replacement: 'Thalia' + model_replacement: 'Tolino Shine' + + ######### + # Thl + # @ref: http://en.thl.com.cn/Mobile + # @ref: http://thlmobilestore.com + ######### + - regex: '; *(?:CJ[ -])?(ThL|THL)[ -]([^;/]+) Build/' + device_replacement: '$1 $2' + brand_replacement: 'Thl' + model_replacement: '$2' + - regex: '; *(T100|T200|T5|W100|W200|W8s) Build/' + device_replacement: '$1' + brand_replacement: 'Thl' + model_replacement: '$1' + + ######### + # T-Mobile (Operator Branded Devices) + ######### + # @ref: https://en.wikipedia.org/wiki/HTC_Hero + - regex: '; *(T-Mobile[ _]G2[ _]Touch) Build' + device_replacement: '$1' + brand_replacement: 'HTC' + model_replacement: 'Hero' + # @ref: https://en.wikipedia.org/wiki/HTC_Desire_Z + - regex: '; *(T-Mobile[ _]G2) Build' + device_replacement: '$1' + brand_replacement: 'HTC' + model_replacement: 'Desire Z' + - regex: '; *(T-Mobile myTouch Q) Build' + device_replacement: '$1' + brand_replacement: 'Huawei' + model_replacement: 'U8730' + - regex: '; *(T-Mobile myTouch) Build' + device_replacement: '$1' + brand_replacement: 'Huawei' + model_replacement: 'U8680' + - regex: ';
*(T-Mobile_Espresso) Build' + device_replacement: '$1' + brand_replacement: 'HTC' + model_replacement: 'Espresso' + - regex: '; *(T-Mobile G1) Build' + device_replacement: '$1' + brand_replacement: 'HTC' + model_replacement: 'Dream' + - regex: '\b(T-Mobile ?)?(myTouch)[ _]?([34]G)[ _]?([^\/]*) (?:Mozilla|Build)' + device_replacement: '$1$2 $3 $4' + brand_replacement: 'HTC' + model_replacement: '$2 $3 $4' + - regex: '\b(T-Mobile)_([^_]+)_(.*) Build' + device_replacement: '$1 $2 $3' + brand_replacement: 'Tmobile' + model_replacement: '$2 $3' + - regex: '\b(T-Mobile)[_ ]?(.*?)Build' + device_replacement: '$1 $2' + brand_replacement: 'Tmobile' + model_replacement: '$2' + + ######### + # Tomtec + # @ref: http://www.tom-tec.eu/pages/tablets.php + ######### + - regex: ' (ATP[0-9]{4}) Build' + device_replacement: '$1' + brand_replacement: 'Tomtec' + model_replacement: '$1' + + ######### + # Tooky + # @ref: http://www.tookymobile.com/ + ######### + - regex: ' *(TOOKY)[ _\-]([^;/]+) ?(?:Build|;)' + regex_flag: 'i' + device_replacement: '$1 $2' + brand_replacement: 'Tooky' + model_replacement: '$2' + + ######### + # Toshiba + # @ref: http://www.toshiba.co.jp/ + # @missing: LT170, Thrive 7, TOSHIBA STB10 + ######### + - regex: '\b(TOSHIBA_AC_AND_AZ|TOSHIBA_FOLIO_AND_A|FOLIO_AND_A)' + device_replacement: '$1' + brand_replacement: 'Toshiba' + model_replacement: 'Folio 100' + - regex: '; *([Ff]olio ?100) Build/' + device_replacement: '$1' + brand_replacement: 'Toshiba' + model_replacement: 'Folio 100' + - regex: '; *(AT[0-9]{2,3}(?:\-A|LE\-A|PE\-A|SE|a)?|AT7-A|AT1S0|Hikari-iFrame/WDPF-[^;/]+|THRiVE|Thrive) Build/' + device_replacement: 'Toshiba $1' + brand_replacement: 'Toshiba' + model_replacement: '$1' + + ######### + # Touchmate + # @ref: http://touchmatepc.com/new/ + ######### + - regex: '; *(TM-MID\d+[^;/]+|TOUCHMATE|MID-750) Build' + device_replacement: '$1' + brand_replacement: 'Touchmate' + model_replacement: '$1' + # @todo: needs verification user-agents missing + - regex: '; *(TM-SM\d+[^;/]+) Build' + device_replacement: '$1' + brand_replacement: 'Touchmate' + model_replacement: '$1' + + ######### + # Treq + # @ref: http://www.treq.co.id/product + ######### + - regex: '; *(A10 [Bb]asic2?) Build/' + device_replacement: '$1' + brand_replacement: 'Treq' + model_replacement: '$1' + - regex: '; *(TREQ[ _\-])([^;/]+) Build' + regex_flag: 'i' + device_replacement: '$1$2' + brand_replacement: 'Treq' + model_replacement: '$2' + + ######### + # Umeox + # @ref: http://umeox.com/ + # @models: A936|A603|X-5|X-3 + ######### + # @todo: guessed markers + - regex: '; *(X-?5|X-?3) Build/' + device_replacement: '$1' + brand_replacement: 'Umeox' + model_replacement: '$1' + # @todo: guessed markers + - regex: '; *(A502\+?|A936|A603|X1|X2) Build/' + device_replacement: '$1' + brand_replacement: 'Umeox' + model_replacement: '$1' + + ######### + # Versus + # @ref: http://versusuk.com/support.html + ######### + - regex: '(TOUCH(?:TAB|PAD).+?) 
Build/' + regex_flag: 'i' + device_replacement: 'Versus $1' + brand_replacement: 'Versus' + model_replacement: '$1' + + ######### + # Vertu + # @ref: http://www.vertu.com/ + ######### + - regex: '(VERTU) ([^;/]+) Build/' + device_replacement: '$1 $2' + brand_replacement: 'Vertu' + model_replacement: '$2' + + ######### + # Videocon + # @ref: http://www.videoconmobiles.com + ######### + - regex: '; *(Videocon)[ _\-]([^;/]+) *(?:Build|;)' + device_replacement: '$1 $2' + brand_replacement: 'Videocon' + model_replacement: '$2' + - regex: ' (VT\d{2}[A-Za-z]*) Build' + device_replacement: '$1' + brand_replacement: 'Videocon' + model_replacement: '$1' + + ######### + # Viewsonic + # @ref: http://viewsonic.com + ######### + - regex: '; *((?:ViewPad|ViewPhone|VSD)[^;/]+) Build/' + device_replacement: '$1' + brand_replacement: 'Viewsonic' + model_replacement: '$1' + - regex: '; *(ViewSonic-)([^;/]+) Build/' + device_replacement: '$1$2' + brand_replacement: 'Viewsonic' + model_replacement: '$2' + - regex: '; *(GTablet.*) Build/' + device_replacement: '$1' + brand_replacement: 'Viewsonic' + model_replacement: '$1' + + ######### + # vivo + # @ref: http://vivo.cn/ + ######### + - regex: '; *([Vv]ivo)[ _]([^;/]+) Build' + device_replacement: '$1 $2' + brand_replacement: 'vivo' + model_replacement: '$2' + + ######### + # Vodafone (Operator Branded Devices) + # @ref: ?? + ######### + - regex: '(Vodafone) (.*) Build/' + device_replacement: '$1 $2' + brand_replacement: '$1' + model_replacement: '$2' + + ######### + # Walton + # @ref: http://www.waltonbd.com/ + ######### + - regex: '; *(?:Walton[ _\-])?(Primo[ _\-][^;/]+) Build' + regex_flag: 'i' + device_replacement: 'Walton $1' + brand_replacement: 'Walton' + model_replacement: '$1' + + ######### + # Wiko + # @ref: http://fr.wikomobile.com/collection.php?s=Smartphones + ######### + - regex: '; *(?:WIKO[ \-])?(CINK\+?|BARRY|BLOOM|DARKFULL|DARKMOON|DARKNIGHT|DARKSIDE|FIZZ|HIGHWAY|IGGY|OZZY|RAINBOW|STAIRWAY|SUBLIM|WAX|CINK [^;/]+) Build/' + regex_flag: 'i' + device_replacement: 'Wiko $1' + brand_replacement: 'Wiko' + model_replacement: '$1' + + ######### + # WellcoM + # @ref: ?? + ######### + - regex: '; *WellcoM-([^;/]+) Build' + device_replacement: 'Wellcom $1' + brand_replacement: 'Wellcom' + model_replacement: '$1' + + ########## + # WeTab + # @ref: http://wetab.mobi/ + ########## + - regex: '(?:(WeTab)-Browser|; (wetab) Build)' + device_replacement: '$1' + brand_replacement: 'WeTab' + model_replacement: 'WeTab' + + ######### + # Wolfgang + # @ref: http://wolfgangmobile.com/ + ######### + - regex: '; *(AT-AS[^;/]+) Build' + device_replacement: 'Wolfgang $1' + brand_replacement: 'Wolfgang' + model_replacement: '$1' + + ######### + # Woxter + # @ref: http://www.woxter.es/es-es/categories/index + ######### + - regex: '; *(?:Woxter|Wxt) ([^;/]+) Build' + device_replacement: 'Woxter $1' + brand_replacement: 'Woxter' + model_replacement: '$1' + + ######### + # Yarvik Zania + # @ref: http://yarvik.com + ######### + - regex: '; *(?:Xenta |Luna )?(TAB[234][0-9]{2}|TAB0[78]-\d{3}|TAB0?9-\d{3}|TAB1[03]-\d{3}|SMP\d{2}-\d{3}) Build/' + device_replacement: 'Yarvik $1' + brand_replacement: 'Yarvik' + model_replacement: '$1' + + ######### + # Yifang + # @note: Needs to be at the very last as manufacturer builds for other brands. 
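+ # @note: Parsers in this file are tried top to bottom and the first match wins, + # so this catch-all must trail every brand-specific matcher. Illustrative only + # (hypothetical UA fragment, not taken from this file's tests): "; ABCM1010HD Build/" + # reaching this entry captures $1='ABC', $2='M1010HD', $3='', i.e. device + # 'Yifang ABCM1010HD' with model 'M1010HD'.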
+ # @ref: http://www.yifangdigital.com/ + # @models: M1010, M1011, M1007, M1008, M1005, M899, M899LP, M909, M8000, + # M8001, M8002, M8003, M849, M815, M816, M819, M805, M878, M780LPW, + # M778, M7000, M7000AD, M7000NBD, M7001, M7002, M7002KBD, M777, M767, + # M789, M799, M769, M757, M755, M753, M752, M739, M729, M723, M712, M727 + ######### + - regex: '; *([A-Z]{2,4})(M\d{3,}[A-Z]{2})([^;\)\/]*)(?: Build|[;\)])' + device_replacement: 'Yifang $1$2$3' + brand_replacement: 'Yifang' + model_replacement: '$2' + + ######### + # XiaoMi + # @ref: http://www.xiaomi.com/event/buyphone + ######### + - regex: '; *((MI|HM|MI-ONE|Redmi)[ -](NOTE |Note )?[^;/]*) (Build|MIUI)/' + device_replacement: 'XiaoMi $1' + brand_replacement: 'XiaoMi' + model_replacement: '$1' + + ######### + # Xolo + # @ref: http://www.xolo.in/ + ######### + - regex: '; *XOLO[ _]([^;/]*tab.*) Build' + regex_flag: 'i' + device_replacement: 'Xolo $1' + brand_replacement: 'Xolo' + model_replacement: '$1' + - regex: '; *XOLO[ _]([^;/]+) Build' + regex_flag: 'i' + device_replacement: 'Xolo $1' + brand_replacement: 'Xolo' + model_replacement: '$1' + - regex: '; *(q\d0{2,3}[a-z]?) Build' + regex_flag: 'i' + device_replacement: 'Xolo $1' + brand_replacement: 'Xolo' + model_replacement: '$1' + + ######### + # Xoro + # @ref: http://www.xoro.de/produkte/ + ######### + - regex: '; *(PAD ?[79]\d+[^;/]*|TelePAD\d+[^;/]) Build' + device_replacement: 'Xoro $1' + brand_replacement: 'Xoro' + model_replacement: '$1' + + ######### + # Zopo + # @ref: http://www.zopomobiles.com/products.html + ######### + - regex: '; *(?:(?:ZOPO|Zopo)[ _]([^;/]+)|(ZP ?(?:\d{2}[^;/]+|C2))|(C[2379])) Build' + device_replacement: '$1$2$3' + brand_replacement: 'Zopo' + model_replacement: '$1$2$3' + + ######### + # ZiiLabs + # @ref: http://www.ziilabs.com/products/platforms/androidreferencetablets.php + ######### + - regex: '; *(ZiiLABS) (Zii[^;/]*) Build' + device_replacement: '$1 $2' + brand_replacement: 'ZiiLabs' + model_replacement: '$2' + - regex: '; *(Zii)_([^;/]*) Build' + device_replacement: '$1 $2' + brand_replacement: 'ZiiLabs' + model_replacement: '$2' + + ######### + # ZTE + # @ref: http://www.ztedevices.com/ + ######### + - regex: '; *(ARIZONA|(?:ATLAS|Atlas) W|D930|Grand (?:[SX][^;]*|Era|Memo[^;]*)|JOE|(?:Kis|KIS)\b[^;]*|Libra|Light [^;]*|N8[056][01]|N850L|N8000|N9[15]\d{2}|N9810|NX501|Optik|(?:Vip )Racer[^;]*|RacerII|RACERII|San Francisco[^;]*|V9[AC]|V55|V881|Z[679][0-9]{2}[A-z]?) 
Build' + device_replacement: '$1' + brand_replacement: 'ZTE' + model_replacement: '$1' + - regex: '; *([A-Z]\d+)_USA_[^;]* Build' + device_replacement: '$1' + brand_replacement: 'ZTE' + model_replacement: '$1' + - regex: '; *(SmartTab\d+)[^;]* Build' + device_replacement: '$1' + brand_replacement: 'ZTE' + model_replacement: '$1' + - regex: '; *(?:Blade|BLADE|ZTE-BLADE)([^;/]*) Build' + device_replacement: 'ZTE Blade$1' + brand_replacement: 'ZTE' + model_replacement: 'Blade$1' + - regex: '; *(?:Skate|SKATE|ZTE-SKATE)([^;/]*) Build' + device_replacement: 'ZTE Skate$1' + brand_replacement: 'ZTE' + model_replacement: 'Skate$1' + - regex: '; *(Orange |Optimus )(Monte Carlo|San Francisco) Build' + device_replacement: '$1$2' + brand_replacement: 'ZTE' + model_replacement: '$1$2' + - regex: '; *(?:ZXY-ZTE_|ZTE\-U |ZTE[\- _]|ZTE-C[_ ])([^;/]+) Build' + device_replacement: 'ZTE $1' + brand_replacement: 'ZTE' + model_replacement: '$1' + # operator specific + - regex: '; (BASE) (lutea|Lutea 2|Tab[^;]*) Build' + device_replacement: '$1 $2' + brand_replacement: 'ZTE' + model_replacement: '$1 $2' + - regex: '; (Avea inTouch 2|soft stone|tmn smart a7|Movistar[ _]Link) Build' + regex_flag: 'i' + device_replacement: '$1' + brand_replacement: 'ZTE' + model_replacement: '$1' + - regex: '; *(vp9plus)\)' + device_replacement: '$1' + brand_replacement: 'ZTE' + model_replacement: '$1' + + ########## + # Zync + # @ref: http://www.zync.in/index.php/our-products/tablet-phablets + ########## + - regex: '; ?(Cloud[ _]Z5|z1000|Z99 2G|z99|z930|z999|z990|z909|Z919|z900) Build/' + device_replacement: '$1' + brand_replacement: 'Zync' + model_replacement: '$1' + + ########## + # Kindle + # @note: Needs to be after Sony Playstation Vita as this UA contains Silk/3.2 + # @ref: https://developer.amazon.com/sdk/fire/specifications.html + # @ref: http://amazonsilk.wordpress.com/useful-bits/silk-user-agent/ + ########## + - regex: '; ?(KFOT|Kindle Fire) Build\b' + device_replacement: 'Kindle Fire' + brand_replacement: 'Amazon' + model_replacement: 'Kindle Fire' + - regex: '; ?(KFOTE|Amazon Kindle Fire2) Build\b' + device_replacement: 'Kindle Fire 2' + brand_replacement: 'Amazon' + model_replacement: 'Kindle Fire 2' + - regex: '; ?(KFTT) Build\b' + device_replacement: 'Kindle Fire HD' + brand_replacement: 'Amazon' + model_replacement: 'Kindle Fire HD 7"' + - regex: '; ?(KFJWI) Build\b' + device_replacement: 'Kindle Fire HD 8.9" WiFi' + brand_replacement: 'Amazon' + model_replacement: 'Kindle Fire HD 8.9" WiFi' + - regex: '; ?(KFJWA) Build\b' + device_replacement: 'Kindle Fire HD 8.9" 4G' + brand_replacement: 'Amazon' + model_replacement: 'Kindle Fire HD 8.9" 4G' + - regex: '; ?(KFSOWI) Build\b' + device_replacement: 'Kindle Fire HD 7" WiFi' + brand_replacement: 'Amazon' + model_replacement: 'Kindle Fire HD 7" WiFi' + - regex: '; ?(KFTHWI) Build\b' + device_replacement: 'Kindle Fire HDX 7" WiFi' + brand_replacement: 'Amazon' + model_replacement: 'Kindle Fire HDX 7" WiFi' + - regex: '; ?(KFTHWA) Build\b' + device_replacement: 'Kindle Fire HDX 7" 4G' + brand_replacement: 'Amazon' + model_replacement: 'Kindle Fire HDX 7" 4G' + - regex: '; ?(KFAPWI) Build\b' + device_replacement: 'Kindle Fire HDX 8.9" WiFi' + brand_replacement: 'Amazon' + model_replacement: 'Kindle Fire HDX 8.9" WiFi' + - regex: '; ?(KFAPWA) Build\b' + device_replacement: 'Kindle Fire HDX 8.9" 4G' + brand_replacement: 'Amazon' + model_replacement: 'Kindle Fire HDX 8.9" 4G' + - regex: '; ?Amazon ([^;/]+) Build\b' + device_replacement: '$1' + brand_replacement: 'Amazon' + 
model_replacement: '$1' + - regex: '; ?(Kindle) Build\b' + device_replacement: 'Kindle' + brand_replacement: 'Amazon' + model_replacement: 'Kindle' + - regex: '; ?(Silk)/(\d+)\.(\d+)(?:\.([0-9\-]+))? Build\b' + device_replacement: 'Kindle Fire' + brand_replacement: 'Amazon' + model_replacement: 'Kindle Fire$2' + - regex: ' (Kindle)/(\d+\.\d+)' + device_replacement: 'Kindle' + brand_replacement: 'Amazon' + model_replacement: '$1 $2' + - regex: ' (Silk|Kindle)/(\d+)\.' + device_replacement: 'Kindle' + brand_replacement: 'Amazon' + model_replacement: 'Kindle' + + ######### + # Devices from Chinese manufacturer(s) + # @note: identified by x-wap-profile http://218.249.47.94/Xianghe/.* + ######### + - regex: '(sprd)\-([^/]+)/' + device_replacement: '$1 $2' + brand_replacement: '$1' + model_replacement: '$2' + # @ref: http://eshinechina.en.alibaba.com/ + - regex: '; *(H\d{2}00\+?) Build' + device_replacement: '$1' + brand_replacement: 'Hero' + model_replacement: '$1' + - regex: '; *(iphone|iPhone5) Build/' + device_replacement: 'Xianghe $1' + brand_replacement: 'Xianghe' + model_replacement: '$1' + - regex: '; *(e\d{4}[a-z]?_?v\d+|v89_[^;/]+)[^;/]+ Build/' + device_replacement: 'Xianghe $1' + brand_replacement: 'Xianghe' + model_replacement: '$1' + + ######### + # Cellular + # @ref: + # @note: Operator branded devices + ######### + - regex: '\bUSCC[_\-]?([^ ;/\)]+)' + device_replacement: '$1' + brand_replacement: 'Cellular' + model_replacement: '$1' + + ###################################################################### + # Windows Phone Parsers + ###################################################################### + + ######### + # Alcatel Windows Phones + ######### + - regex: 'Windows Phone [^;]+; .*?IEMobile/[^;\)]+[;\)] ?(?:ARM; ?Touch; ?|Touch; ?)?(?:ALCATEL)[^;]*; *([^;,\)]+)' + device_replacement: 'Alcatel $1' + brand_replacement: 'Alcatel' + model_replacement: '$1' + + ######### + # Asus Windows Phones + ######### + #~ - regex: 'Windows Phone [^;]+; .*?IEMobile/[^;\)]+[;\)] ?(?:ARM; ?Touch; ?|Touch; ?|WpsLondonTest; ?)?(?:ASUS|Asus)[^;]*; *([^;,\)]+)' + - regex: 'Windows Phone [^;]+; .*?IEMobile/[^;\)]+[;\)] ?(?:ARM; ?Touch; ?|Touch; ?|WpsLondonTest; ?)?(?:ASUS|Asus)[^;]*; *([^;,\)]+)' + device_replacement: 'Asus $1' + brand_replacement: 'Asus' + model_replacement: '$1' + + ######### + # Dell Windows Phones + ######### + - regex: 'Windows Phone [^;]+; .*?IEMobile/[^;\)]+[;\)] ?(?:ARM; ?Touch; ?|Touch; ?)?(?:DELL|Dell)[^;]*; *([^;,\)]+)' + device_replacement: 'Dell $1' + brand_replacement: 'Dell' + model_replacement: '$1' + + ######### + # HTC Windows Phones + ######### + - regex: 'Windows Phone [^;]+; .*?IEMobile/[^;\)]+[;\)] ?(?:ARM; ?Touch; ?|Touch; ?|WpsLondonTest; ?)?(?:HTC|Htc|HTC_blocked[^;]*)[^;]*; *(?:HTC)?([^;,\)]+)' + device_replacement: 'HTC $1' + brand_replacement: 'HTC' + model_replacement: '$1' + + ######### + # Huawei Windows Phones + ######### + - regex: 'Windows Phone [^;]+; .*?IEMobile/[^;\)]+[;\)] ?(?:ARM; ?Touch; ?|Touch; ?)?(?:HUAWEI)[^;]*; *(?:HUAWEI )?([^;,\)]+)' + device_replacement: 'Huawei $1' + brand_replacement: 'Huawei' + model_replacement: '$1' + + ######### + # LG Windows Phones + ######### + - regex: 'Windows Phone [^;]+; .*?IEMobile/[^;\)]+[;\)] ?(?:ARM; ?Touch; ?|Touch; ?)?(?:LG|Lg)[^;]*; *(?:LG[ \-])?([^;,\)]+)' + device_replacement: 'LG $1' + brand_replacement: 'LG' + model_replacement: '$1' + + ######### + # Nokia Windows Phones + ######### + - regex: 'Windows Phone [^;]+; .*?IEMobile/[^;\)]+[;\)] ?(?:ARM; ?Touch; ?|Touch; ?)?(?:NOKIA|Nokia)[^;]*;
*(?:NOKIA ?|Nokia ?|LUMIA ?|[Ll]umia ?)*(\d{3,}[^;\)]*)' + device_replacement: 'Lumia $1' + brand_replacement: 'Nokia' + model_replacement: 'Lumia $1' + - regex: 'Windows Phone [^;]+; .*?IEMobile/[^;\)]+[;\)] ?(?:ARM; ?Touch; ?|Touch; ?)?(?:NOKIA|Nokia)[^;]*; *(RM-\d{3,})' + device_replacement: 'Nokia $1' + brand_replacement: 'Nokia' + model_replacement: '$1' + - regex: '(?:Windows Phone [^;]+; .*?IEMobile/[^;\)]+[;\)]|WPDesktop;) ?(?:ARM; ?Touch; ?|Touch; ?)?(?:NOKIA|Nokia)[^;]*; *(?:NOKIA ?|Nokia ?|LUMIA ?|[Ll]umia ?)*([^;\)]+)' + device_replacement: 'Nokia $1' + brand_replacement: 'Nokia' + model_replacement: '$1' + + ######### + # Microsoft Windows Phones + ######### + - regex: 'Windows Phone [^;]+; .*?IEMobile/[^;\)]+[;\)] ?(?:ARM; ?Touch; ?|Touch; ?)?(?:Microsoft(?: Corporation)?)[^;]*; *([^;,\)]+)' + device_replacement: 'Microsoft $1' + brand_replacement: 'Microsoft' + model_replacement: '$1' + + ######### + # Samsung Windows Phones + ######### + - regex: 'Windows Phone [^;]+; .*?IEMobile/[^;\)]+[;\)] ?(?:ARM; ?Touch; ?|Touch; ?|WpsLondonTest; ?)?(?:SAMSUNG)[^;]*; *(?:SAMSUNG )?([^;,\.\)]+)' + device_replacement: 'Samsung $1' + brand_replacement: 'Samsung' + model_replacement: '$1' + + ######### + # Toshiba Windows Phones + ######### + - regex: 'Windows Phone [^;]+; .*?IEMobile/[^;\)]+[;\)] ?(?:ARM; ?Touch; ?|Touch; ?|WpsLondonTest; ?)?(?:TOSHIBA|FujitsuToshibaMobileCommun)[^;]*; *([^;,\)]+)' + device_replacement: 'Toshiba $1' + brand_replacement: 'Toshiba' + model_replacement: '$1' + + ######### + # Generic Windows Phones + ######### + - regex: 'Windows Phone [^;]+; .*?IEMobile/[^;\)]+[;\)] ?(?:ARM; ?Touch; ?|Touch; ?|WpsLondonTest; ?)?([^;]+); *([^;,\)]+)' + device_replacement: '$1 $2' + brand_replacement: '$1' + model_replacement: '$2' + + ###################################################################### + # Other Devices Parser + ###################################################################### + + ######### + # Samsung Bada Phones + ######### + - regex: '(?:^|; )SAMSUNG\-([A-Za-z0-9\-]+).* Bada/' + device_replacement: 'Samsung $1' + brand_replacement: 'Samsung' + model_replacement: '$1' + + ######### + # Firefox OS + ######### + - regex: '\(Mobile; ALCATEL ?(One|ONE) ?(Touch|TOUCH) ?([^;/]+)(?:/[^;]+)?; rv:[^\)]+\) Gecko/[^\/]+ Firefox/' + device_replacement: 'Alcatel $1 $2 $3' + brand_replacement: 'Alcatel' + model_replacement: 'One Touch $3' + - regex: '\(Mobile; (?:ZTE([^;]+)|(OpenC)); rv:[^\)]+\) Gecko/[^\/]+ Firefox/' + device_replacement: 'ZTE $1$2' + brand_replacement: 'ZTE' + model_replacement: '$1$2' + + ########## + # NOKIA + # @note: NokiaN8-00 comes before iphone. 
Sometimes spoofs iphone + ########## + - regex: 'Nokia(N[0-9]+)([A-z_\-][A-z0-9_\-]*)' + device_replacement: 'Nokia $1' + brand_replacement: 'Nokia' + model_replacement: '$1$2' + - regex: '(?:NOKIA|Nokia)(?:\-| *)(?:([A-Za-z0-9]+)\-[0-9a-f]{32}|([A-Za-z0-9\-]+)(?:UCBrowser)|([A-Za-z0-9\-]+))' + device_replacement: 'Nokia $1$2$3' + brand_replacement: 'Nokia' + model_replacement: '$1$2$3' + - regex: 'Lumia ([A-Za-z0-9\-]+)' + device_replacement: 'Lumia $1' + brand_replacement: 'Nokia' + model_replacement: 'Lumia $1' + # UCWEB Browser on Symbian + - regex: '\(Symbian; U; S60 V5; [A-z]{2}\-[A-z]{2}; (SonyEricsson|Samsung|Nokia|LG)([^;/]+)\)' + device_replacement: '$1 $2' + brand_replacement: '$1' + model_replacement: '$2' + # Nokia Symbian + - regex: '\(Symbian(?:/3)?; U; ([^;]+);' + device_replacement: 'Nokia $1' + brand_replacement: 'Nokia' + model_replacement: '$1' + + ########## + # BlackBerry + # @ref: http://www.useragentstring.com/pages/BlackBerry/ + ########## + - regex: 'BB10; ([A-Za-z0-9\- ]+)\)' + device_replacement: 'BlackBerry $1' + brand_replacement: 'BlackBerry' + model_replacement: '$1' + - regex: 'Play[Bb]ook.+RIM Tablet OS' + device_replacement: 'BlackBerry Playbook' + brand_replacement: 'BlackBerry' + model_replacement: 'Playbook' + - regex: 'Black[Bb]erry ([0-9]+);' + device_replacement: 'BlackBerry $1' + brand_replacement: 'BlackBerry' + model_replacement: '$1' + - regex: 'Black[Bb]erry([0-9]+)' + device_replacement: 'BlackBerry $1' + brand_replacement: 'BlackBerry' + model_replacement: '$1' + - regex: 'Black[Bb]erry;' + device_replacement: 'BlackBerry' + brand_replacement: 'BlackBerry' + + ########## + # PALM / HP + # @note: some palm devices must come before iphone. sometimes spoofs iphone in ua + ########## + - regex: '(Pre|Pixi)/\d+\.\d+' + device_replacement: 'Palm $1' + brand_replacement: 'Palm' + model_replacement: '$1' + - regex: 'Palm([0-9]+)' + device_replacement: 'Palm $1' + brand_replacement: 'Palm' + model_replacement: '$1' + - regex: 'Treo([A-Za-z0-9]+)' + device_replacement: 'Palm Treo $1' + brand_replacement: 'Palm' + model_replacement: 'Treo $1' + - regex: 'webOS.*(P160U(?:NA)?)/(\d+).(\d+)' + device_replacement: 'HP Veer' + brand_replacement: 'HP' + model_replacement: 'Veer' + - regex: '(Touch[Pp]ad)/\d+\.\d+' + device_replacement: 'HP TouchPad' + brand_replacement: 'HP' + model_replacement: 'TouchPad' + - regex: 'HPiPAQ([A-Za-z0-9]+)/\d+.\d+' + device_replacement: 'HP iPAQ $1' + brand_replacement: 'HP' + model_replacement: 'iPAQ $1' + - regex: 'PDA; (PalmOS)/sony/model ([a-z]+)/Revision' + device_replacement: '$1' + brand_replacement: 'Sony' + model_replacement: '$1 $2' + + ########## + # AppleTV + # No built in browser that I can tell + # Stack Overflow indicated iTunes-AppleTV/4.1 as a known UA for app available and I'm seeing it in live traffic + ########## + - regex: '(Apple\s?TV)' + device_replacement: 'AppleTV' + brand_replacement: 'Apple' + model_replacement: 'AppleTV' + + ######### + # Tesla Model S + ######### + - regex: '(QtCarBrowser)' + device_replacement: 'Tesla Model S' + brand_replacement: 'Tesla' + model_replacement: 'Model S' + + ########## + # iSTUFF + # @note: complete but probably catches spoofs + # ipad and ipod must be parsed before iphone + # cannot determine specific device type from ua string. (3g, 3gs, 4, etc) + ########## + # @note: on some ua the device can be identified e.g. 
iPhone5,1 + - regex: '((?:iPhone|iPad|iPod)\d+,\d+)' + device_replacement: '$1' + brand_replacement: 'Apple' + model_replacement: '$1' + # @note: iPad needs to be before iPhone + - regex: '(iPad)(?:;| Simulator;)' + device_replacement: '$1' + brand_replacement: 'Apple' + model_replacement: '$1' + - regex: '(iPod)(?:;| touch;| Simulator;)' + device_replacement: '$1' + brand_replacement: 'Apple' + model_replacement: '$1' + - regex: '(iPhone)(?:;| Simulator;)' + device_replacement: '$1' + brand_replacement: 'Apple' + model_replacement: '$1' + # @note: desktop applications show device info + - regex: 'CFNetwork/.* Darwin/\d.*\(((?:Mac|iMac|PowerMac|PowerBook)[^\d]*)(\d+)(?:,|%2C)(\d+)' + device_replacement: '$1$2,$3' + brand_replacement: 'Apple' + model_replacement: '$1$2,$3' + # @note: iOS applications do not show device info + - regex: 'CFNetwork/.* Darwin/\d' + device_replacement: 'iOS-Device' + brand_replacement: 'Apple' + model_replacement: 'iOS-Device' + + ########## + # Acer + ########## + - regex: 'acer_([A-Za-z0-9]+)_' + device_replacement: 'Acer $1' + brand_replacement: 'Acer' + model_replacement: '$1' + + ########## + # Alcatel + ########## + - regex: '(?:ALCATEL|Alcatel)-([A-Za-z0-9\-]+)' + device_replacement: 'Alcatel $1' + brand_replacement: 'Alcatel' + model_replacement: '$1' + + ########## + # Amoi + ########## + - regex: '(?:Amoi|AMOI)\-([A-Za-z0-9]+)' + device_replacement: 'Amoi $1' + brand_replacement: 'Amoi' + model_replacement: '$1' + + ########## + # Asus + ########## + - regex: '(?:; |\/|^)((?:Transformer (?:Pad|Prime) |Transformer |PadFone[ _]?)[A-Za-z0-9]*)' + device_replacement: 'Asus $1' + brand_replacement: 'Asus' + model_replacement: '$1' + - regex: '(?:asus.*?ASUS|Asus|ASUS|asus)[\- ;]*((?:Transformer (?:Pad|Prime) |Transformer |Padfone |Nexus[ _])?[A-Za-z0-9]+)' + device_replacement: 'Asus $1' + brand_replacement: 'Asus' + model_replacement: '$1' + + + ########## + # Bird + ########## + - regex: '\bBIRD[ \-\.]([A-Za-z0-9]+)' + device_replacement: 'Bird $1' + brand_replacement: 'Bird' + model_replacement: '$1' + + ########## + # Dell + ########## + - regex: '\bDell ([A-Za-z0-9]+)' + device_replacement: 'Dell $1' + brand_replacement: 'Dell' + model_replacement: '$1' + + ########## + # DoCoMo + ########## + - regex: 'DoCoMo/2\.0 ([A-Za-z0-9]+)' + device_replacement: 'DoCoMo $1' + brand_replacement: 'DoCoMo' + model_replacement: '$1' + - regex: '([A-Za-z0-9]+)_W;FOMA' + device_replacement: 'DoCoMo $1' + brand_replacement: 'DoCoMo' + model_replacement: '$1' + - regex: '([A-Za-z0-9]+);FOMA' + device_replacement: 'DoCoMo $1' + brand_replacement: 'DoCoMo' + model_replacement: '$1' + + ########## + # htc + ########## + - regex: '\b(?:HTC/|HTC/[a-z0-9]+/)?HTC[ _\-;]? 
*(.*?)(?:-?Mozilla|fingerPrint|[;/\(\)]|$)' + device_replacement: 'HTC $1' + brand_replacement: 'HTC' + model_replacement: '$1' + + ########## + # Huawei + ########## + - regex: 'Huawei([A-Za-z0-9]+)' + device_replacement: 'Huawei $1' + brand_replacement: 'Huawei' + model_replacement: '$1' + - regex: 'HUAWEI-([A-Za-z0-9]+)' + device_replacement: 'Huawei $1' + brand_replacement: 'Huawei' + model_replacement: '$1' + - regex: 'vodafone([A-Za-z0-9]+)' + device_replacement: 'Huawei Vodafone $1' + brand_replacement: 'Huawei' + model_replacement: 'Vodafone $1' + + ########## + # i-mate + ########## + - regex: 'i\-mate ([A-Za-z0-9]+)' + device_replacement: 'i-mate $1' + brand_replacement: 'i-mate' + model_replacement: '$1' + + ########## + # kyocera + ########## + - regex: 'Kyocera\-([A-Za-z0-9]+)' + device_replacement: 'Kyocera $1' + brand_replacement: 'Kyocera' + model_replacement: '$1' + - regex: 'KWC\-([A-Za-z0-9]+)' + device_replacement: 'Kyocera $1' + brand_replacement: 'Kyocera' + model_replacement: '$1' + + ########## + # lenovo + ########## + - regex: 'Lenovo[_\-]([A-Za-z0-9]+)' + device_replacement: 'Lenovo $1' + brand_replacement: 'Lenovo' + model_replacement: '$1' + + ########## + # HbbTV (European and Australian standard) + # written before the LG regexes, as LG is making HbbTV too + ########## + - regex: '(HbbTV)/[0-9]+\.[0-9]+\.[0-9]+ \([^;]*; *(LG)E *; *([^;]*) *;[^;]*;[^;]*;\)' + device_replacement: '$1' + brand_replacement: '$2' + model_replacement: '$3' + - regex: '(HbbTV)/1\.1\.1.*CE-HTML/1\.\d;(Vendor/)*(THOM[^;]*?)[;\s](?:.*SW-Version/.*)*(LF[^;]+);?' + device_replacement: '$1' + brand_replacement: 'Thomson' + model_replacement: '$4' + - regex: '(HbbTV)(?:/1\.1\.1)?(?: ?\(;;;;;\))?; *CE-HTML(?:/1\.\d)?; *([^ ]+) ([^;]+);' + device_replacement: '$1' + brand_replacement: '$2' + model_replacement: '$3' + - regex: '(HbbTV)/1\.1\.1 \(;;;;;\) Maple_2011' + device_replacement: '$1' + brand_replacement: 'Samsung' + - regex: '(HbbTV)/[0-9]+\.[0-9]+\.[0-9]+ \([^;]*; *(?:CUS:([^;]*)|([^;]+)) *; *([^;]*) *;.*;' + device_replacement: '$1' + brand_replacement: '$2$3' + model_replacement: '$4' + - regex: '(HbbTV)/[0-9]+\.[0-9]+\.[0-9]+' + device_replacement: '$1' + + ########## + # LGE NetCast TV + ########## + - regex: 'LGE; (?:Media\/)?([^;]*);[^;]*;[^;]*;?\); "?LG NetCast(\.TV|\.Media|)-\d+' + device_replacement: 'NetCast$2' + brand_replacement: 'LG' + model_replacement: '$1' + + ########## + # InettvBrowser + ########## + - regex: 'InettvBrowser/[0-9]+\.[0-9A-Z]+ \([^;]*;(Sony)([^;]*);[^;]*;[^\)]*\)' + device_replacement: 'Inettv' + brand_replacement: '$1' + model_replacement: '$2' + - regex: 'InettvBrowser/[0-9]+\.[0-9A-Z]+ \([^;]*;([^;]*);[^;]*;[^\)]*\)' + device_replacement: 'Inettv' + brand_replacement: 'Generic_Inettv' + model_replacement: '$1' + - regex: '(?:InettvBrowser|TSBNetTV|NETTV|HBBTV)' + device_replacement: 'Inettv' + brand_replacement: 'Generic_Inettv' + + ########## + # lg + ########## + # LG Symbian Phones + - regex: 'Series60/\d\.\d (LG)[\-]?([A-Za-z0-9 \-]+)' + device_replacement: '$1 $2' + brand_replacement: '$1' + model_replacement: '$2' + # other LG phones + - regex: '\b(?:LGE[ \-]LG\-(?:AX)?|LGE |LGE?-LG|LGE?[ \-]|LG[ /\-]|lg[\-])([A-Za-z0-9]+)\b' + device_replacement: 'LG $1' + brand_replacement: 'LG' + model_replacement: '$1' + - regex: '(?:^LG[\-]?|^LGE[\-/]?)([A-Za-z]+[0-9]+[A-Za-z]*)' + device_replacement: 'LG $1' + brand_replacement: 'LG' + model_replacement: '$1' + - regex: '^LG([0-9]+[A-Za-z]*)' + device_replacement: 'LG $1' + brand_replacement: 'LG' + 
model_replacement: '$1' + + ########## + # microsoft + ########## + - regex: '(KIN\.[^ ]+) (\d+)\.(\d+)' + device_replacement: 'Microsoft $1' + brand_replacement: 'Microsoft' + model_replacement: '$1' + - regex: '(?:MSIE|XBMC).*\b(Xbox)\b' + device_replacement: '$1' + brand_replacement: 'Microsoft' + model_replacement: '$1' + - regex: '; ARM; Trident/6\.0; Touch[\);]' + device_replacement: 'Microsoft Surface RT' + brand_replacement: 'Microsoft' + model_replacement: 'Surface RT' + + ########## + # motorola + ########## + - regex: 'Motorola\-([A-Za-z0-9]+)' + device_replacement: 'Motorola $1' + brand_replacement: 'Motorola' + model_replacement: '$1' + - regex: 'MOTO\-([A-Za-z0-9]+)' + device_replacement: 'Motorola $1' + brand_replacement: 'Motorola' + model_replacement: '$1' + - regex: 'MOT\-([A-z0-9][A-z0-9\-]*)' + device_replacement: 'Motorola $1' + brand_replacement: 'Motorola' + model_replacement: '$1' + + ########## + # nintendo + ########## + - regex: 'Nintendo WiiU' + device_replacement: 'Nintendo Wii U' + brand_replacement: 'Nintendo' + model_replacement: 'Wii U' + - regex: 'Nintendo (DS|3DS|DSi|Wii);' + device_replacement: 'Nintendo $1' + brand_replacement: 'Nintendo' + model_replacement: '$1' + + ########## + # pantech + ########## + - regex: '(?:Pantech|PANTECH)[ _-]?([A-Za-z0-9\-]+)' + device_replacement: 'Pantech $1' + brand_replacement: 'Pantech' + model_replacement: '$1' + + ########## + # philips + ########## + - regex: 'Philips([A-Za-z0-9]+)' + device_replacement: 'Philips $1' + brand_replacement: 'Philips' + model_replacement: '$1' + - regex: 'Philips ([A-Za-z0-9]+)' + device_replacement: 'Philips $1' + brand_replacement: 'Philips' + model_replacement: '$1' + + ########## + # Samsung + ########## + # Samsung Symbian Devices + - regex: 'SymbianOS/9\.\d.* Samsung[/\-]([A-Za-z0-9 \-]+)' + device_replacement: 'Samsung $1' + brand_replacement: 'Samsung' + model_replacement: '$1' + - regex: '(Samsung)(SGH)(i[0-9]+)' + device_replacement: '$1 $2$3' + brand_replacement: '$1' + model_replacement: '$2-$3' + - regex: 'SAMSUNG-ANDROID-MMS/([^;/]+)' + device_replacement: '$1' + brand_replacement: 'Samsung' + model_replacement: '$1' + # Other Samsung + #- regex: 'SAMSUNG(?:; |-)([A-Za-z0-9\-]+)' + - regex: 'SAMSUNG(?:; |[ -/])([A-Za-z0-9\-]+)' + regex_flag: 'i' + device_replacement: 'Samsung $1' + brand_replacement: 'Samsung' + model_replacement: '$1' + + ########## + # Sega + ########## + - regex: '(Dreamcast)' + device_replacement: 'Sega $1' + brand_replacement: 'Sega' + model_replacement: '$1' + + ########## + # Siemens mobile + ########## + - regex: '^SIE-([A-Za-z0-9]+)' + device_replacement: 'Siemens $1' + brand_replacement: 'Siemens' + model_replacement: '$1' + + ########## + # Softbank + ########## + - regex: 'Softbank/[12]\.0/([A-Za-z0-9]+)' + device_replacement: 'Softbank $1' + brand_replacement: 'Softbank' + model_replacement: '$1' + + ########## + # SonyEricsson + ########## + - regex: 'SonyEricsson ?([A-Za-z0-9\-]+)' + device_replacement: 'Ericsson $1' + brand_replacement: 'SonyEricsson' + model_replacement: '$1' + + ########## + # Sony + ########## + - regex: 'Android [^;]+; ([^ ]+) (Sony)/' + device_replacement: '$2 $1' + brand_replacement: '$2' + model_replacement: '$1' + - regex: '(Sony)(?:BDP\/|\/)?([^ /;\)]+)[ /;\)]' + device_replacement: '$1 $2' + brand_replacement: '$1' + model_replacement: '$2' + + ######### + # Puffin Browser Device detect + # A=Android, I=iOS, P=Phone, T=Tablet + # AT=Android+Tablet + ######### + - regex: 'Puffin/[\d\.]+IT' + device_replacement: 
'iPad' + brand_replacement: 'Apple' + model_replacement: 'iPad' + - regex: 'Puffin/[\d\.]+IP' + device_replacement: 'iPhone' + brand_replacement: 'Apple' + model_replacement: 'iPhone' + - regex: 'Puffin/[\d\.]+AT' + device_replacement: 'Generic Tablet' + brand_replacement: 'Generic' + model_replacement: 'Tablet' + - regex: 'Puffin/[\d\.]+AP' + device_replacement: 'Generic Smartphone' + brand_replacement: 'Generic' + model_replacement: 'Smartphone' + + ######### + # Android General Device Matching (far from perfect) + ######### + - regex: 'Android[\- ][\d]+\.[\d]+; [A-Za-z]{2}\-[A-Za-z]{0,2}; WOWMobile (.+) Build' + brand_replacement: 'Generic_Android' + model_replacement: '$1' + - regex: 'Android[\- ][\d]+\.[\d]+\-update1; [A-Za-z]{2}\-[A-Za-z]{0,2} *; *(.+?) Build' + brand_replacement: 'Generic_Android' + model_replacement: '$1' + - regex: 'Android[\- ][\d]+(?:\.[\d]+){1,2}; *[A-Za-z]{2}[_\-][A-Za-z]{0,2}\-? *; *(.+?) Build' + brand_replacement: 'Generic_Android' + model_replacement: '$1' + - regex: 'Android[\- ][\d]+(?:\.[\d]+){1,2}; *[A-Za-z]{0,2}\- *; *(.+?) Build' + brand_replacement: 'Generic_Android' + model_replacement: '$1' + # No build info at all - "Build" follows locale immediately + - regex: 'Android[\- ][\d]+(?:\.[\d]+){1,2}; *[a-z]{0,2}[_\-]?[A-Za-z]{0,2};? Build' + device_replacement: 'Generic Smartphone' + brand_replacement: 'Generic' + model_replacement: 'Smartphone' + - regex: 'Android[\- ][\d]+(?:\.[\d]+){1,2}; *\-?[A-Za-z]{2}; *(.+?) Build' + brand_replacement: 'Generic_Android' + model_replacement: '$1' + - regex: 'Android[\- ][\d]+(?:\.[\d]+){1,2}(?:;.*)?; *(.+?) Build' + brand_replacement: 'Generic_Android' + model_replacement: '$1' + + ########## + # Google TV + ########## + - regex: '(GoogleTV)' + brand_replacement: 'Generic_Inettv' + model_replacement: '$1' + + ########## + # WebTV + ########## + - regex: '(WebTV)/\d+.\d+' + brand_replacement: 'Generic_Inettv' + model_replacement: '$1' + # Roku Digital-Video-Players https://www.roku.com/ + - regex: '^(Roku)/DVP-\d+\.\d+' + brand_replacement: 'Generic_Inettv' + model_replacement: '$1' + + ########## + # Generic Tablet + ########## + - regex: '(Android 3\.\d|Opera Tablet|Tablet; .+Firefox/|Android.*(?:Tab|Pad))' + regex_flag: 'i' + device_replacement: 'Generic Tablet' + brand_replacement: 'Generic' + model_replacement: 'Tablet' + + ########## + # Generic Smart Phone + ########## + - regex: '(Symbian|\bS60(Version|V\d)|\bS60\b|\((Series 60|Windows Mobile|Palm OS|Bada); Opera Mini|Windows CE|Opera Mobi|BREW|Brew|Mobile; .+Firefox/|iPhone OS|Android|MobileSafari|Windows *Phone|\(webOS/|PalmOS)' + device_replacement: 'Generic Smartphone' + brand_replacement: 'Generic' + model_replacement: 'Smartphone' + - regex: '(hiptop|avantgo|plucker|xiino|blazer|elaine)' + regex_flag: 'i' + device_replacement: 'Generic Smartphone' + brand_replacement: 'Generic' + model_replacement: 'Smartphone' + + ########## + # Spiders (this is hack...) 
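+ # @note: Matched case-insensitively (regex_flag 'i'); most alternatives are plain + # substrings, a few are ^-anchored. Any hit is bucketed as device/brand 'Spider', + # model 'Desktop'. For example, a hypothetical UA "MyCompanyCrawler/1.0" would + # match here on the 'crawler' token.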
+ ########## + - regex: '(bot|zao|borg|DBot|oegp|silk|Xenu|zeal|^NING|CCBot|crawl|htdig|lycos|slurp|teoma|voila|yahoo|Sogou|CiBra|Nutch|^Java/|^JNLP/|Daumoa|Genieo|ichiro|larbin|pompos|Scrapy|snappy|speedy|spider|msnbot|msrbot|vortex|^vortex|crawler|favicon|indexer|Riddler|scooter|scraper|scrubby|WhatWeb|WinHTTP|bingbot|openbot|gigabot|furlbot|polybot|seekbot|^voyager|archiver|Icarus6j|mogimogi|Netvibes|blitzbot|altavista|charlotte|findlinks|Retreiver|TLSProber|WordPress|SeznamBot|ProoXiBot|wsr\-agent|Squrl Java|EtaoSpider|PaperLiBot|SputnikBot|A6\-Indexer|netresearch|searchsight|baiduspider|YisouSpider|ICC\-Crawler|http%20client|Python-urllib|dataparksearch|converacrawler|Screaming Frog|AppEngine-Google|YahooCacheSystem|fast\-webcrawler|Sogou Pic Spider|semanticdiscovery|Innovazion Crawler|facebookexternalhit|Google.*/\+/web/snippet|Google-HTTP-Java-Client|BlogBridge|IlTrovatore-Setaccio|InternetArchive|GomezAgent|WebThumbnail|heritrix|NewsGator|PagePeeker|Reaper|ZooShot|holmes)' + regex_flag: 'i' + device_replacement: 'Spider' + brand_replacement: 'Spider' + model_replacement: 'Desktop' + + ########## + # Generic Feature Phone + # take care to do case insensitive matching + ########## + - regex: '^(1207|3gso|4thp|501i|502i|503i|504i|505i|506i|6310|6590|770s|802s|a wa|acer|acs\-|airn|alav|asus|attw|au\-m|aur |aus |abac|acoo|aiko|alco|alca|amoi|anex|anny|anyw|aptu|arch|argo|bmobile|bell|bird|bw\-n|bw\-u|beck|benq|bilb|blac|c55/|cdm\-|chtm|capi|comp|cond|dall|dbte|dc\-s|dica|ds\-d|ds12|dait|devi|dmob|doco|dopo|dorado|el(?:38|39|48|49|50|55|58|68)|el[3456]\d{2}dual|erk0|esl8|ex300|ez40|ez60|ez70|ezos|ezze|elai|emul|eric|ezwa|fake|fly\-|fly_|g\-mo|g1 u|g560|gf\-5|grun|gene|go.w|good|grad|hcit|hd\-m|hd\-p|hd\-t|hei\-|hp i|hpip|hs\-c|htc |htc\-|htca|htcg)' + regex_flag: 'i' + device_replacement: 'Generic Feature Phone' + brand_replacement: 'Generic' + model_replacement: 'Feature Phone' + - regex: '^(htcp|htcs|htct|htc_|haie|hita|huaw|hutc|i\-20|i\-go|i\-ma|i\-mobile|i230|iac|iac\-|iac/|ig01|im1k|inno|iris|jata|kddi|kgt|kgt/|kpt |kwc\-|klon|lexi|lg g|lg\-a|lg\-b|lg\-c|lg\-d|lg\-f|lg\-g|lg\-k|lg\-l|lg\-m|lg\-o|lg\-p|lg\-s|lg\-t|lg\-u|lg\-w|lg/k|lg/l|lg/u|lg50|lg54|lge\-|lge/|leno|m1\-w|m3ga|m50/|maui|mc01|mc21|mcca|medi|meri|mio8|mioa|mo01|mo02|mode|modo|mot |mot\-|mt50|mtp1|mtv |mate|maxo|merc|mits|mobi|motv|mozz|n100|n101|n102|n202|n203|n300|n302|n500|n502|n505|n700|n701|n710|nec\-|nem\-|newg|neon)' + regex_flag: 'i' + device_replacement: 'Generic Feature Phone' + brand_replacement: 'Generic' + model_replacement: 'Feature Phone' + - regex: '^(netf|noki|nzph|o2 x|o2\-x|opwv|owg1|opti|oran|ot\-s|p800|pand|pg\-1|pg\-2|pg\-3|pg\-6|pg\-8|pg\-c|pg13|phil|pn\-2|pt\-g|palm|pana|pire|pock|pose|psio|qa\-a|qc\-2|qc\-3|qc\-5|qc\-7|qc07|qc12|qc21|qc32|qc60|qci\-|qwap|qtek|r380|r600|raks|rim9|rove|s55/|sage|sams|sc01|sch\-|scp\-|sdk/|se47|sec\-|sec0|sec1|semc|sgh\-|shar|sie\-|sk\-0|sl45|slid|smb3|smt5|sp01|sph\-|spv |spv\-|sy01|samm|sany|sava|scoo|send|siem|smar|smit|soft|sony|t\-mo|t218|t250|t600|t610|t618|tcl\-|tdg\-|telm|tim\-|ts70|tsm\-|tsm3|tsm5|tx\-9|tagt)' + regex_flag: 'i' + device_replacement: 'Generic Feature Phone' + brand_replacement: 'Generic' + model_replacement: 'Feature Phone' + - regex: '^(talk|teli|topl|tosh|up.b|upg1|utst|v400|v750|veri|vk\-v|vk40|vk50|vk52|vk53|vm40|vx98|virg|vertu|vite|voda|vulc|w3c |w3c\-|wapj|wapp|wapu|wapm|wig 
|wapi|wapr|wapv|wapy|wapa|waps|wapt|winc|winw|wonu|x700|xda2|xdag|yas\-|your|zte\-|zeto|aste|audi|avan|blaz|brew|brvw|bumb|ccwa|cell|cldc|cmd\-|dang|eml2|fetc|hipt|http|ibro|idea|ikom|ipaq|jbro|jemu|jigs|keji|kyoc|kyok|libw|m\-cr|midp|mmef|moto|mwbp|mywa|newt|nok6|o2im|pant|pdxg|play|pluc|port|prox|rozo|sama|seri|smal|symb|treo|upsi|vx52|vx53|vx60|vx61|vx70|vx80|vx81|vx83|vx85|wap\-|webc|whit|wmlb|xda\-|xda_)' + regex_flag: 'i' + device_replacement: 'Generic Feature Phone' + brand_replacement: 'Generic' + model_replacement: 'Feature Phone' + - regex: '^(Ice)$' + device_replacement: 'Generic Feature Phone' + brand_replacement: 'Generic' + model_replacement: 'Feature Phone' + - regex: '(wap[\-\ ]browser|maui|netfront|obigo|teleca|up\.browser|midp|Opera Mini)' + regex_flag: 'i' + device_replacement: 'Generic Feature Phone' + brand_replacement: 'Generic' + model_replacement: 'Feature Phone' diff --git a/plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentProcessorFactoryTests.java b/plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentProcessorFactoryTests.java new file mode 100644 index 00000000000..10c6ccd7941 --- /dev/null +++ b/plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentProcessorFactoryTests.java @@ -0,0 +1,174 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.ingest.useragent; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.test.ESTestCase; +import org.junit.BeforeClass; + +import java.io.BufferedReader; +import java.io.BufferedWriter; +import java.io.IOException; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Collections; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; + +public class UserAgentProcessorFactoryTests extends ESTestCase { + + private static Map userAgentParsers; + + private static String regexWithoutDevicesFilename = "regexes_without_devices.yaml"; + private static Path userAgentConfigDir; + + @BeforeClass + public static void createUserAgentParsers() throws IOException { + Path configDir = createTempDir(); + userAgentConfigDir = configDir.resolve("ingest-user-agent"); + Files.createDirectories(userAgentConfigDir); + + // Copy file, leaving out the device parsers at the end + try (BufferedReader reader = new BufferedReader( + new InputStreamReader(UserAgentProcessor.class.getResourceAsStream("/regexes.yaml"), StandardCharsets.UTF_8)); + BufferedWriter writer = Files.newBufferedWriter(userAgentConfigDir.resolve(regexWithoutDevicesFilename));) { + String line; + while ((line = reader.readLine()) != null) { + if (line.startsWith("device_parsers:")) { + break; + } + + writer.write(line); + writer.newLine(); + } + } + + userAgentParsers = IngestUserAgentPlugin.createUserAgentParsers(userAgentConfigDir, new UserAgentCache(1000)); + } + + public void testBuildDefaults() throws Exception { + UserAgentProcessor.Factory factory = new UserAgentProcessor.Factory(userAgentParsers); + + Map config = new HashMap<>(); + config.put("field", "_field"); + + String processorTag = randomAsciiOfLength(10); + + UserAgentProcessor processor = factory.create(null, processorTag, config); + assertThat(processor.getTag(), equalTo(processorTag)); + assertThat(processor.getField(), equalTo("_field")); + assertThat(processor.getTargetField(), equalTo("user_agent")); + assertThat(processor.getUaParser().getUaPatterns().size(), greaterThan(0)); + assertThat(processor.getUaParser().getOsPatterns().size(), greaterThan(0)); + assertThat(processor.getUaParser().getDevicePatterns().size(), greaterThan(0)); + assertThat(processor.getProperties(), equalTo(EnumSet.allOf(UserAgentProcessor.Property.class))); + } + + public void testBuildTargetField() throws Exception { + UserAgentProcessor.Factory factory = new UserAgentProcessor.Factory(userAgentParsers); + + Map config = new HashMap<>(); + config.put("field", "_field"); + config.put("target_field", "_target_field"); + + UserAgentProcessor processor = factory.create(null, null, config); + assertThat(processor.getField(), equalTo("_field")); + assertThat(processor.getTargetField(), equalTo("_target_field")); + } + + public void testBuildRegexFile() throws Exception { + UserAgentProcessor.Factory factory = new UserAgentProcessor.Factory(userAgentParsers); + + Map config = new HashMap<>(); + config.put("field", "_field"); + config.put("regex_file", regexWithoutDevicesFilename); + + UserAgentProcessor processor = factory.create(null, null, config); + assertThat(processor.getField(), equalTo("_field")); + 
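// The file referenced by regex_file was written by createUserAgentParsers() + // above, which copies regexes.yaml but stops before its device parsers, so + // UA and OS patterns load while the device pattern list stays empty. +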
assertThat(processor.getUaParser().getUaPatterns().size(), greaterThan(0)); + assertThat(processor.getUaParser().getOsPatterns().size(), greaterThan(0)); + assertThat(processor.getUaParser().getDevicePatterns().size(), equalTo(0)); + } + + public void testBuildNonExistingRegexFile() throws Exception { + UserAgentProcessor.Factory factory = new UserAgentProcessor.Factory(userAgentParsers); + + Map config = new HashMap<>(); + config.put("field", "_field"); + config.put("regex_file", "does-not-exist.yaml"); + + ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, config)); + assertThat(e.getMessage(), equalTo("[regex_file] regex file [does-not-exist.yaml] doesn't exist (has to exist at node startup)")); + } + + public void testBuildFields() throws Exception { + UserAgentProcessor.Factory factory = new UserAgentProcessor.Factory(userAgentParsers); + + Set properties = EnumSet.noneOf(UserAgentProcessor.Property.class); + List fieldNames = new ArrayList<>(); + int numFields = scaledRandomIntBetween(1, UserAgentProcessor.Property.values().length); + for (int i = 0; i < numFields; i++) { + UserAgentProcessor.Property property = UserAgentProcessor.Property.values()[i]; + properties.add(property); + fieldNames.add(property.name().toLowerCase(Locale.ROOT)); + } + + Map config = new HashMap<>(); + config.put("field", "_field"); + config.put("properties", fieldNames); + + UserAgentProcessor processor = factory.create(null, null, config); + assertThat(processor.getField(), equalTo("_field")); + assertThat(processor.getProperties(), equalTo(properties)); + } + + public void testInvalidProperty() throws Exception { + UserAgentProcessor.Factory factory = new UserAgentProcessor.Factory(userAgentParsers); + + Map config = new HashMap<>(); + config.put("field", "_field"); + config.put("properties", Collections.singletonList("invalid")); + + ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, config)); + assertThat(e.getMessage(), equalTo("[properties] illegal property value [invalid]. valid values are [NAME, MAJOR, MINOR, " + + "PATCH, OS, OS_NAME, OS_MAJOR, OS_MINOR, DEVICE, BUILD]")); + } + + public void testInvalidPropertiesType() throws Exception { + UserAgentProcessor.Factory factory = new UserAgentProcessor.Factory(userAgentParsers); + + Map config = new HashMap<>(); + config.put("field", "_field"); + config.put("properties", "invalid"); + + ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, config)); + assertThat(e.getMessage(), equalTo("[properties] property isn't a list, but of type [java.lang.String]")); + } +} diff --git a/plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentProcessorTests.java b/plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentProcessorTests.java new file mode 100644 index 00000000000..d9b5eed059a --- /dev/null +++ b/plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentProcessorTests.java @@ -0,0 +1,161 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest.useragent; + +import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.useragent.UserAgentProcessor; +import org.elasticsearch.test.ESTestCase; +import org.junit.BeforeClass; + +import java.io.IOException; +import java.io.InputStream; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.is; + +public class UserAgentProcessorTests extends ESTestCase { + + private static UserAgentProcessor processor; + + @BeforeClass + public static void setupProcessor() throws IOException { + InputStream regexStream = UserAgentProcessor.class.getResourceAsStream("/regexes.yaml"); + assertNotNull(regexStream); + + UserAgentParser parser = new UserAgentParser(randomAsciiOfLength(10), regexStream, new UserAgentCache(1000)); + + processor = new UserAgentProcessor(randomAsciiOfLength(10), "source_field", "target_field", parser, + EnumSet.allOf(UserAgentProcessor.Property.class)); + } + + @SuppressWarnings("unchecked") + public void testCommonBrowser() throws Exception { + Map document = new HashMap<>(); + document.put("source_field", + "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.149 Safari/537.36"); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + + processor.execute(ingestDocument); + Map data = ingestDocument.getSourceAndMetadata(); + + assertThat(data, hasKey("target_field")); + Map target = (Map) data.get("target_field"); + + assertThat(target.get("name"), is("Chrome")); + assertThat(target.get("major"), is("33")); + assertThat(target.get("minor"), is("0")); + assertThat(target.get("patch"), is("1750")); + assertNull(target.get("build")); + + assertThat(target.get("os"), is("Mac OS X 10.9.2")); + assertThat(target.get("os_name"), is("Mac OS X")); + assertThat(target.get("os_major"), is("10")); + assertThat(target.get("os_minor"), is("9")); + + assertThat(target.get("device"), is("Other")); + } + + @SuppressWarnings("unchecked") + public void testUncommonDevice() throws Exception { + Map document = new HashMap<>(); + document.put("source_field", + "Mozilla/5.0 (Linux; U; Android 3.0; en-us; Xoom Build/HRI39) AppleWebKit/525.10+ " + + "(KHTML, like Gecko) Version/3.0.4 Mobile Safari/523.12.2"); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + + processor.execute(ingestDocument); + Map data = ingestDocument.getSourceAndMetadata(); + + assertThat(data, hasKey("target_field")); + Map target = (Map) data.get("target_field"); + + assertThat(target.get("name"), is("Android")); + assertThat(target.get("major"), is("3")); + assertThat(target.get("minor"), is("0")); + assertNull(target.get("patch")); + assertNull(target.get("build")); + + assertThat(target.get("os"), is("Android 3.0")); + assertThat(target.get("os_name"), is("Android")); + assertThat(target.get("os_major"), is("3")); + assertThat(target.get("os_minor"), 
is("0")); + + assertThat(target.get("device"), is("Motorola Xoom")); + } + + @SuppressWarnings("unchecked") + public void testSpider() throws Exception { + Map document = new HashMap<>(); + document.put("source_field", + "Mozilla/5.0 (compatible; EasouSpider; +http://www.easou.com/search/spider.html)"); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + + processor.execute(ingestDocument); + Map data = ingestDocument.getSourceAndMetadata(); + + assertThat(data, hasKey("target_field")); + Map target = (Map) data.get("target_field"); + + assertThat(target.get("name"), is("EasouSpider")); + assertNull(target.get("major")); + assertNull(target.get("minor")); + assertNull(target.get("patch")); + assertNull(target.get("build")); + + assertThat(target.get("os"), is("Other")); + assertThat(target.get("os_name"), is("Other")); + assertNull(target.get("os_major")); + assertNull(target.get("os_minor")); + + assertThat(target.get("device"), is("Spider")); + } + + @SuppressWarnings("unchecked") + public void testUnknown() throws Exception { + Map document = new HashMap<>(); + document.put("source_field", + "Something I made up v42.0.1"); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + + processor.execute(ingestDocument); + Map data = ingestDocument.getSourceAndMetadata(); + + assertThat(data, hasKey("target_field")); + Map target = (Map) data.get("target_field"); + + assertThat(target.get("name"), is("Other")); + assertNull(target.get("major")); + assertNull(target.get("minor")); + assertNull(target.get("patch")); + assertNull(target.get("build")); + + assertThat(target.get("os"), is("Other")); + assertThat(target.get("os_name"), is("Other")); + assertNull(target.get("os_major")); + assertNull(target.get("os_minor")); + + assertThat(target.get("device"), is("Other")); + } +} + diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/S3Module.java b/plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentRestIT.java similarity index 53% rename from plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/S3Module.java rename to plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentRestIT.java index 11294054c21..010f85f671a 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/S3Module.java +++ b/plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/UserAgentRestIT.java @@ -17,22 +17,24 @@ * under the License. 
*/ -package org.elasticsearch.cloud.aws; +package org.elasticsearch.ingest.useragent; -import org.elasticsearch.common.inject.AbstractModule; +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.test.rest.RestTestCandidate; +import org.elasticsearch.test.rest.parser.RestTestParseException; -public class S3Module extends AbstractModule { +import java.io.IOException; +public class UserAgentRestIT extends ESRestTestCase { - // pkg private so it is settable by tests - static Class s3ServiceImpl = InternalAwsS3Service.class; - - public static Class getS3ServiceImpl() { - return s3ServiceImpl; + public UserAgentRestIT(@Name("yaml") RestTestCandidate testCandidate) { + super(testCandidate); } - @Override - protected void configure() { - bind(AwsS3Service.class).to(s3ServiceImpl).asEagerSingleton(); + @ParametersFactory + public static Iterable parameters() throws IOException, RestTestParseException { + return ESRestTestCase.createParameters(0, 1); } } diff --git a/plugins/ingest-user-agent/src/test/resources/rest-api-spec/test/ingest-useragent/10_basic.yaml b/plugins/ingest-user-agent/src/test/resources/rest-api-spec/test/ingest-useragent/10_basic.yaml new file mode 100644 index 00000000000..fee3173f393 --- /dev/null +++ b/plugins/ingest-user-agent/src/test/resources/rest-api-spec/test/ingest-useragent/10_basic.yaml @@ -0,0 +1,11 @@ +"ingest-user-agent plugin installed": + - do: + cluster.state: {} + + - set: {master_node: master} + + - do: + nodes.info: {} + + - match: { nodes.$master.plugins.0.name: ingest-user-agent } + - match: { nodes.$master.ingest.processors.0.type: user_agent } diff --git a/plugins/ingest-user-agent/src/test/resources/rest-api-spec/test/ingest-useragent/20_useragent_processor.yaml b/plugins/ingest-user-agent/src/test/resources/rest-api-spec/test/ingest-useragent/20_useragent_processor.yaml new file mode 100644 index 00000000000..0964e69a99b --- /dev/null +++ b/plugins/ingest-user-agent/src/test/resources/rest-api-spec/test/ingest-useragent/20_useragent_processor.yaml @@ -0,0 +1,86 @@ +--- +"Test user agent processor with defaults": + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "user_agent" : { + "field" : "field1" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + type: test + id: 1 + pipeline: "my_pipeline" + body: {field1: "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.149 Safari/537.36"} + + - do: + get: + index: test + type: test + id: 1 + - match: { _source.field1: "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.149 Safari/537.36" } + - match: { _source.user_agent.name: "Chrome" } + - match: { _source.user_agent.os: "Mac OS X 10.9.2" } + - match: { _source.user_agent.os_name: "Mac OS X" } + - match: { _source.user_agent.os_major: "10" } + - match: { _source.user_agent.os_minor: "9" } + - match: { _source.user_agent.major: "33" } + - match: { _source.user_agent.minor: "0" } + - match: { _source.user_agent.patch: "1750" } + - match: { _source.user_agent.device: "Other" } + +--- +"Test user agent processor with parameters": + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "user_agent" : { + "field" : "field1", + 
"target_field": "field2", + "properties": ["os"] + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + type: test + id: 1 + pipeline: "my_pipeline" + body: {field1: "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.149 Safari/537.36"} + + - do: + get: + index: test + type: test + id: 1 + - match: { _source.field1: "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.149 Safari/537.36" } + - match: { _source.field2.os: "Mac OS X 10.9.2" } + - is_false: _source.user_agent + - is_false: _source.field2.name + - is_false: _source.field2.os_name + - is_false: _source.field2.os_major + - is_false: _source.field2.os_minor + - is_false: _source.field2.major + - is_false: _source.field2.minor + - is_false: _source.field2.patch + - is_false: _source.field2.device diff --git a/plugins/ingest-user-agent/src/test/resources/rest-api-spec/test/ingest-useragent/30_custom_regex.yaml b/plugins/ingest-user-agent/src/test/resources/rest-api-spec/test/ingest-useragent/30_custom_regex.yaml new file mode 100644 index 00000000000..5613145b664 --- /dev/null +++ b/plugins/ingest-user-agent/src/test/resources/rest-api-spec/test/ingest-useragent/30_custom_regex.yaml @@ -0,0 +1,42 @@ +--- +"Test user agent processor with custom regex file": + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "user_agent" : { + "field": "field1", + "regex_file": "test-regexes.yaml" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + type: test + id: 1 + pipeline: "my_pipeline" + body: {field1: "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.149 Safari/537.36"} + + - do: + get: + index: test + type: test + id: 1 + - match: { _source.field1: "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.149 Safari/537.36" } + - match: { _source.user_agent.name: "Test" } + - match: { _source.user_agent.os: "Other" } + - match: { _source.user_agent.os_name: "Other" } + - match: { _source.user_agent.device: "Other" } + - is_false: _source.user_agent.os_major + - is_false: _source.user_agent.os_minor + - is_false: _source.user_agent.major + - is_false: _source.user_agent.minor + - is_false: _source.user_agent.patch diff --git a/plugins/ingest-user-agent/test/test-regexes.yaml b/plugins/ingest-user-agent/test/test-regexes.yaml new file mode 100644 index 00000000000..e41dec700c0 --- /dev/null +++ b/plugins/ingest-user-agent/test/test-regexes.yaml @@ -0,0 +1,3 @@ +user_agent_parsers: + - regex: '.*' + family_replacement: 'Test' \ No newline at end of file diff --git a/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/ExampleCatAction.java b/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/ExampleCatAction.java index d5e0a62ecb5..b27c3fad2b8 100644 --- a/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/ExampleCatAction.java +++ b/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/ExampleCatAction.java @@ -18,7 +18,7 @@ */ package org.elasticsearch.plugin.example; -import org.elasticsearch.client.Client; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Table; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -38,26 +38,26 @@ public class ExampleCatAction extends 
AbstractCatAction { private final ExamplePluginConfiguration config; @Inject - public ExampleCatAction(Settings settings, RestController controller, - Client client, ExamplePluginConfiguration config) { - super(settings, controller, client); + public ExampleCatAction(Settings settings, RestController controller, ExamplePluginConfiguration config) { + super(settings); this.config = config; controller.registerHandler(GET, "/_cat/configured_example", this); } @Override - protected void doRequest(final RestRequest request, final RestChannel channel, final Client client) { + protected void doRequest(final RestRequest request, final RestChannel channel, final NodeClient client) { Table table = getTableWithHeader(request); table.startRow(); table.addCell(config.getTestConfig()); table.endRow(); try { channel.sendResponse(RestTable.buildResponse(table, channel)); - } catch (Throwable e) { + } catch (Exception e) { try { channel.sendResponse(new BytesRestResponse(channel, e)); - } catch (Throwable e1) { - logger.error("failed to send failure response", e1); + } catch (Exception inner) { + inner.addSuppressed(e); + logger.error("failed to send failure response", inner); } } } diff --git a/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/JvmExamplePlugin.java b/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/JvmExamplePlugin.java index 163d832d22c..271b404e043 100644 --- a/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/JvmExamplePlugin.java +++ b/plugins/jvm-example/src/main/java/org/elasticsearch/plugin/example/JvmExamplePlugin.java @@ -44,13 +44,13 @@ public class JvmExamplePlugin extends Plugin { } @Override - public Collection nodeModules() { + public Collection createGuiceModules() { return Collections.singletonList(new ConfiguredExampleModule()); } @Override @SuppressWarnings("rawtypes") // Plugin use a rawtype - public Collection> nodeServices() { + public Collection> getGuiceServiceClasses() { Collection> services = new ArrayList<>(); return services; } diff --git a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptScriptMultiThreadedTests.java b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptScriptMultiThreadedTests.java index c2f1214e72b..634a4ca6dfa 100644 --- a/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptScriptMultiThreadedTests.java +++ b/plugins/lang-javascript/src/test/java/org/elasticsearch/script/javascript/JavaScriptScriptMultiThreadedTests.java @@ -35,9 +35,6 @@ import java.util.concurrent.atomic.AtomicBoolean; import static org.hamcrest.Matchers.equalTo; -/** - * - */ public class JavaScriptScriptMultiThreadedTests extends ESTestCase { public void testExecutableNoRuntimeParams() throws Exception { final JavaScriptScriptEngineService se = new JavaScriptScriptEngineService(Settings.Builder.EMPTY_SETTINGS); @@ -64,9 +61,9 @@ public class JavaScriptScriptMultiThreadedTests extends ESTestCase { long result = ((Number) script.run()).longValue(); assertThat(result, equalTo(addition)); } - } catch (Throwable t) { + } catch (Exception e) { failed.set(true); - logger.error("failed", t); + logger.error("failed", e); } finally { latch.countDown(); } @@ -106,9 +103,9 @@ public class JavaScriptScriptMultiThreadedTests extends ESTestCase { long result = ((Number) script.run()).longValue(); assertThat(result, equalTo(addition)); } - } catch (Throwable t) { + } catch (Exception e) { failed.set(true); - logger.error("failed", t); + 
logger.error("failed", e); } finally { latch.countDown(); } @@ -147,9 +144,9 @@ public class JavaScriptScriptMultiThreadedTests extends ESTestCase { long result = ((Number) se.executable(new CompiledScript(ScriptService.ScriptType.INLINE, "testExecutableNoRuntimeParams", "js", compiled), runtimeVars).run()).longValue(); assertThat(result, equalTo(addition)); } - } catch (Throwable t) { + } catch (Exception e) { failed.set(true); - logger.error("failed", t); + logger.error("failed", e); } finally { latch.countDown(); } diff --git a/plugins/lang-python/src/main/java/org/elasticsearch/script/python/PythonScriptEngineService.java b/plugins/lang-python/src/main/java/org/elasticsearch/script/python/PythonScriptEngineService.java index b642b7b0a73..5a16c06d4dc 100644 --- a/plugins/lang-python/src/main/java/org/elasticsearch/script/python/PythonScriptEngineService.java +++ b/plugins/lang-python/src/main/java/org/elasticsearch/script/python/PythonScriptEngineService.java @@ -261,7 +261,7 @@ public class PythonScriptEngineService extends AbstractComponent implements Scri } /** Evaluates with reduced privileges */ - private final PyObject evalRestricted(final PyCode code) { + private PyObject evalRestricted(final PyCode code) { // eval the script with reduced privileges return AccessController.doPrivileged(new PrivilegedAction() { @Override diff --git a/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonScriptMultiThreadedTests.java b/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonScriptMultiThreadedTests.java index abf9f661a6d..0a887bc9a7e 100644 --- a/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonScriptMultiThreadedTests.java +++ b/plugins/lang-python/src/test/java/org/elasticsearch/script/python/PythonScriptMultiThreadedTests.java @@ -66,9 +66,9 @@ public class PythonScriptMultiThreadedTests extends ESTestCase { long result = ((Number) script.run()).longValue(); assertThat(result, equalTo(addition)); } - } catch (Throwable t) { + } catch (Exception e) { failed.set(true); - logger.error("failed", t); + logger.error("failed", e); } finally { latch.countDown(); } @@ -109,9 +109,9 @@ public class PythonScriptMultiThreadedTests extends ESTestCase { // long result = ((Number) script.run(runtimeVars)).longValue(); // assertThat(result, equalTo(addition)); // } -// } catch (Throwable t) { +// } catch (Exception e) { // failed.set(true); -// logger.error("failed", t); +// logger.error("failed", e); // } finally { // latch.countDown(); // } @@ -151,9 +151,9 @@ public class PythonScriptMultiThreadedTests extends ESTestCase { long result = ((Number) se.executable(compiledScript, runtimeVars).run()).longValue(); assertThat(result, equalTo(addition)); } - } catch (Throwable t) { + } catch (Exception e) { failed.set(true); - logger.error("failed", t); + logger.error("failed", e); } finally { latch.countDown(); } diff --git a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java index 79174e54c62..06e51686823 100644 --- a/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java +++ b/plugins/mapper-attachments/src/main/java/org/elasticsearch/mapper/attachments/AttachmentMapper.java @@ -482,7 +482,7 @@ public class AttachmentMapper extends FieldMapper { String parsedContent; try { parsedContent = TikaImpl.parse(content, metadata, indexedChars); - } catch 
(Throwable e) { + } catch (Exception e) { // #18: we could ignore errors when Tika does not parse data if (!ignoreErrors) { logger.trace("exception caught", e); @@ -508,8 +508,8 @@ public class AttachmentMapper extends FieldMapper { } context = context.createExternalValueContext(language); languageMapper.parse(context); - } catch(Throwable t) { - logger.debug("Cannot detect language: [{}]", t.getMessage()); + } catch(Exception e) { + logger.debug("Cannot detect language: [{}]", e.getMessage()); } } diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/SimpleAttachmentMapperTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/SimpleAttachmentMapperTests.java index 8c86800e52b..6b80baa7c28 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/SimpleAttachmentMapperTests.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/SimpleAttachmentMapperTests.java @@ -116,7 +116,7 @@ public class SimpleAttachmentMapperTests extends AttachmentUnitTestCase { .endObject() .endObject(); - byte[] mapping = mappingBuilder.bytes().toBytes(); + byte[] mapping = BytesReference.toBytes(mappingBuilder.bytes()); MapperService mapperService = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY, getIndicesModuleWithRegisteredAttachmentMapper()); DocumentMapper docMapper = mapperService.parse("mail", new CompressedXContent(mapping), true); // this should not throw an exception diff --git a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/TikaDocTests.java b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/TikaDocTests.java index fbbdeb83a7d..b32a6ab79a0 100644 --- a/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/TikaDocTests.java +++ b/plugins/mapper-attachments/src/test/java/org/elasticsearch/mapper/attachments/TikaDocTests.java @@ -58,7 +58,7 @@ public class TikaDocTests extends ESTestCase { assertNotNull(parsedContent); assertFalse(parsedContent.isEmpty()); logger.debug("extracted content: {}", parsedContent); - } catch (Throwable e) { + } catch (Exception e) { throw new RuntimeException("parsing of filename: " + fileName.getFileName() + " failed", e); } } diff --git a/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/10_index.yaml b/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/10_index.yaml index 3f3754109ee..5d95ce425cb 100644 --- a/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/10_index.yaml +++ b/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/10_index.yaml @@ -14,10 +14,6 @@ properties: file: type: attachment - - do: - cluster.health: - wait_for_status: yellow - - do: catch: /(.)*mapper_parsing_exception.+No content is provided\.(.)*/ index: @@ -59,9 +55,6 @@ type: attachment file2: type: attachment - - do: - cluster.health: - wait_for_status: yellow - do: index: @@ -133,9 +126,6 @@ # type: attachment # file2: # type: attachment -# - do: -# cluster.health: -# wait_for_status: yellow # # - do: # catch: /(.)*mapper_parsing_exception(.)*The supplied password does not match either the owner or user password in the document\.(.)*/ diff --git a/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/20_search.yaml 
b/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/20_search.yaml index 95d9cef2cbf..bc178b17895 100644 --- a/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/20_search.yaml +++ b/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/20_search.yaml @@ -11,9 +11,6 @@ setup: properties: file: type: attachment - - do: - cluster.health: - wait_for_status: yellow --- # Encoded content with https://www.base64encode.org/ diff --git a/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/30_mapping.yaml b/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/30_mapping.yaml index 458990cc90c..5aaa5a0796c 100644 --- a/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/30_mapping.yaml +++ b/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/30_mapping.yaml @@ -32,9 +32,6 @@ "store": true "name": "store": true - - do: - cluster.health: - wait_for_status: yellow - do: index: @@ -54,7 +51,7 @@ search: index: test body: - fields: [file.content_type,file.name] + stored_fields: [file.content_type,file.name] - match: { hits.total: 1 } - match: { hits.hits.0.fields: { file.content_type: ["text/my-dummy-content-type"], file.name: ["my-dummy-name-txt"] }} diff --git a/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/40_highlight.yaml b/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/40_highlight.yaml index dc6f800c078..658887a9ce5 100644 --- a/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/40_highlight.yaml +++ b/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/40_highlight.yaml @@ -17,10 +17,6 @@ setup: "store" : true "term_vector": "with_positions_offsets" - - do: - cluster.health: - wait_for_status: yellow - --- # Encoded content with https://www.base64encode.org/ # @@ -57,7 +53,7 @@ setup: query: match: file.content: "apache tika" - fields: [] + stored_fields: [] highlight: fields: file.content: {} diff --git a/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/50_files_supported.yaml b/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/50_files_supported.yaml index 69991b9d0c0..a0e1b600bf4 100644 --- a/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/50_files_supported.yaml +++ b/plugins/mapper-attachments/src/test/resources/rest-api-spec/test/mapper_attachments/50_files_supported.yaml @@ -20,9 +20,6 @@ setup: content_type: store: true - - do: - cluster.health: - wait_for_status: yellow --- "Test mapper attachment processor with .doc file": @@ -38,7 +35,7 @@ setup: search: index: test body: - fields: [file.content, file.author, file.date, file.content_length, file.content_type] + stored_fields: [file.content, file.author, file.date, file.content_length, file.content_type] - match: { hits.total: 1 } - match: { hits.hits.0.fields: { file.content: ["Test elasticsearch\n"], @@ -65,7 +62,7 @@ setup: search: index: test body: - fields: [file.content, file.author, file.date, file.content_length, file.content_type] + stored_fields: [file.content, file.author, file.date, file.content_length, file.content_type] - match: { hits.total: 1 } - match: { hits.hits.0.fields: { file.content: ["Test elasticsearch\n"], diff --git 
a/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java b/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java index 9c07c7b3eb3..c9c8972c62d 100644 --- a/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java +++ b/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java @@ -42,15 +42,15 @@ import java.util.Map; import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue; public class SizeFieldMapper extends MetadataFieldMapper { - public static final String NAME = "_size"; - public static final String CONTENT_TYPE = "_size"; public static class Defaults { public static final EnabledAttributeMapper ENABLED_STATE = EnabledAttributeMapper.UNSET_DISABLED; - public static final MappedFieldType SIZE_FIELD_TYPE = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER); - public static final MappedFieldType LEGACY_SIZE_FIELD_TYPE = LegacyIntegerFieldMapper.Defaults.FIELD_TYPE.clone(); + public static final MappedFieldType SIZE_FIELD_TYPE = + new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER); + public static final MappedFieldType LEGACY_SIZE_FIELD_TYPE = + LegacyIntegerFieldMapper.Defaults.FIELD_TYPE.clone(); static { SIZE_FIELD_TYPE.setStored(true); @@ -68,14 +68,31 @@ public class SizeFieldMapper extends MetadataFieldMapper { } } + private static MappedFieldType defaultFieldType(Version indexCreated) { + MappedFieldType defaultFieldType; + if (indexCreated.before(Version.V_5_0_0_alpha2)) { + defaultFieldType = Defaults.LEGACY_SIZE_FIELD_TYPE.clone(); + // doc_values are disabled for bwc with indices created before V_5_0_0_alpha4 + defaultFieldType.setHasDocValues(false); + } else { + defaultFieldType = Defaults.SIZE_FIELD_TYPE.clone(); + if (indexCreated.onOrBefore(Version.V_5_0_0_alpha4)) { + // doc_values are disabled for bwc with indices created before V_5_0_0_alpha4 + defaultFieldType.setHasDocValues(false); + } else { + defaultFieldType.setHasDocValues(true); + } + } + return defaultFieldType; + } + public static class Builder extends MetadataFieldMapper.Builder { protected EnabledAttributeMapper enabledState = EnabledAttributeMapper.UNSET_DISABLED; private Builder(MappedFieldType existing, Version indexCreated) { - super(NAME, existing == null - ? indexCreated.before(Version.V_5_0_0_alpha2) ? Defaults.LEGACY_SIZE_FIELD_TYPE : Defaults.SIZE_FIELD_TYPE - : existing, Defaults.LEGACY_SIZE_FIELD_TYPE); + super(NAME, existing == null ? 
defaultFieldType(indexCreated) : existing.clone(), + defaultFieldType(indexCreated)); builder = this; } @@ -87,21 +104,27 @@ public class SizeFieldMapper extends MetadataFieldMapper { @Override public SizeFieldMapper build(BuilderContext context) { setupFieldType(context); - fieldType.setHasDocValues(false); + if (context.indexCreatedVersion().onOrBefore(Version.V_5_0_0_alpha4)) { + // Make sure that the doc_values are disabled on indices created before V_5_0_0_alpha4 + fieldType.setHasDocValues(false); + } return new SizeFieldMapper(enabledState, fieldType, context.indexSettings()); } } public static class TypeParser implements MetadataFieldMapper.TypeParser { @Override - public MetadataFieldMapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - Builder builder = new Builder(parserContext.mapperService().fullName(NAME), parserContext.indexVersionCreated()); + public MetadataFieldMapper.Builder parse(String name, Map node, + ParserContext parserContext) throws MapperParsingException { + Builder builder = new Builder(parserContext.mapperService().fullName(NAME), + parserContext.indexVersionCreated()); for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { Map.Entry entry = iterator.next(); String fieldName = entry.getKey(); Object fieldNode = entry.getValue(); if (fieldName.equals("enabled")) { - builder.enabled(lenientNodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED); + builder.enabled(lenientNodeBooleanValue(fieldNode) ? + EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED); iterator.remove(); } } @@ -116,14 +139,15 @@ public class SizeFieldMapper extends MetadataFieldMapper { private EnabledAttributeMapper enabledState; - private SizeFieldMapper(Settings indexSettings, MappedFieldType mappedFieldType) { - this(Defaults.ENABLED_STATE, mappedFieldType == null ? Defaults.LEGACY_SIZE_FIELD_TYPE : mappedFieldType, indexSettings); + private SizeFieldMapper(Settings indexSettings, MappedFieldType existing) { + this(Defaults.ENABLED_STATE, + existing == null ? 
defaultFieldType(Version.indexCreated(indexSettings)) : existing.clone(), + indexSettings); } private SizeFieldMapper(EnabledAttributeMapper enabled, MappedFieldType fieldType, Settings indexSettings) { - super(NAME, fieldType, Defaults.LEGACY_SIZE_FIELD_TYPE, indexSettings); + super(NAME, fieldType, defaultFieldType(Version.indexCreated(indexSettings)), indexSettings); this.enabledState = enabled; - } @Override diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java index 761fb5fd144..7cbce102c57 100644 --- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java +++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java @@ -67,7 +67,8 @@ public class SizeFieldMapperUpgradeTests extends ESIntegTestCase { Settings settings = Settings.builder() .put(Environment.PATH_DATA_SETTING.getKey(), dataPath) .build(); - final String node = internalCluster().startDataOnlyNode(settings); // workaround for dangling index loading issue when node is master + // workaround for dangling index loading issue when node is master + final String node = internalCluster().startDataOnlyNode(settings); Path[] nodePaths = internalCluster().getInstance(NodeEnvironment.class, node).nodeDataPaths(); assertEquals(1, nodePaths.length); dataPath = nodePaths[0].resolve(NodeEnvironment.INDICES_FOLDER); @@ -83,8 +84,8 @@ public class SizeFieldMapperUpgradeTests extends ESIntegTestCase { ElasticsearchAssertions.assertHitCount(countResponse, 3L); final SearchResponse sizeResponse = client().prepareSearch(indexName) - .addField("_source") - .addField("_size") + .addStoredField("_source") + .addStoredField("_size") .get(); ElasticsearchAssertions.assertHitCount(sizeResponse, 3L); for (SearchHit hit : sizeResponse.getHits().getHits()) { diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingIT.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingIT.java index b6c341c0601..279c5c96091 100644 --- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingIT.java +++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingIT.java @@ -49,15 +49,19 @@ public class SizeMappingIT extends ESIntegTestCase { String index = "foo"; String type = "mytype"; - XContentBuilder builder = jsonBuilder().startObject().startObject("_size").field("enabled", true).endObject().endObject(); + XContentBuilder builder = + jsonBuilder().startObject().startObject("_size").field("enabled", true).endObject().endObject(); assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder)); // check mapping again assertSizeMappingEnabled(index, type, true); // update some field in the mapping - XContentBuilder updateMappingBuilder = jsonBuilder().startObject().startObject("properties").startObject("otherField").field("type", "text").endObject().endObject().endObject(); - PutMappingResponse putMappingResponse = client().admin().indices().preparePutMapping(index).setType(type).setSource(updateMappingBuilder).get(); + XContentBuilder updateMappingBuilder = + jsonBuilder().startObject().startObject("properties").startObject("otherField").field("type", "text") + .endObject().endObject().endObject(); + PutMappingResponse putMappingResponse = + 
client().admin().indices().preparePutMapping(index).setType(type).setSource(updateMappingBuilder).get(); assertAcked(putMappingResponse); // make sure size field is still in mapping @@ -68,15 +72,18 @@ public class SizeMappingIT extends ESIntegTestCase { String index = "foo"; String type = "mytype"; - XContentBuilder builder = jsonBuilder().startObject().startObject("_size").field("enabled", true).endObject().endObject(); + XContentBuilder builder = + jsonBuilder().startObject().startObject("_size").field("enabled", true).endObject().endObject(); assertAcked(client().admin().indices().prepareCreate(index).addMapping(type, builder)); // check mapping again assertSizeMappingEnabled(index, type, true); // update some field in the mapping - XContentBuilder updateMappingBuilder = jsonBuilder().startObject().startObject("_size").field("enabled", false).endObject().endObject(); - PutMappingResponse putMappingResponse = client().admin().indices().preparePutMapping(index).setType(type).setSource(updateMappingBuilder).get(); + XContentBuilder updateMappingBuilder = + jsonBuilder().startObject().startObject("_size").field("enabled", false).endObject().endObject(); + PutMappingResponse putMappingResponse = + client().admin().indices().preparePutMapping(index).setType(type).setSource(updateMappingBuilder).get(); assertAcked(putMappingResponse); // make sure size field is still in mapping @@ -84,8 +91,10 @@ public class SizeMappingIT extends ESIntegTestCase { } private void assertSizeMappingEnabled(String index, String type, boolean enabled) throws IOException { - String errMsg = String.format(Locale.ROOT, "Expected size field mapping to be " + (enabled ? "enabled" : "disabled") + " for %s/%s", index, type); - GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings(index).addTypes(type).get(); + String errMsg = String.format(Locale.ROOT, + "Expected size field mapping to be " + (enabled ? 
"enabled" : "disabled") + " for %s/%s", index, type); + GetMappingsResponse getMappingsResponse = + client().admin().indices().prepareGetMappings(index).addTypes(type).get(); Map mappingSource = getMappingsResponse.getMappings().get(index).get(type).getSourceAsMap(); assertThat(errMsg, mappingSource, hasKey("_size")); String sizeAsString = mappingSource.get("_size").toString(); diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java index b0802a955df..8cc01aba4bb 100644 --- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java +++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java @@ -19,58 +19,48 @@ package org.elasticsearch.index.mapper.size; -import java.util.Collections; -import java.util.Map; +import java.util.Collection; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; -import org.elasticsearch.indices.IndicesModule; -import org.elasticsearch.plugins.MapperPlugin; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.core.LegacyNumberFieldMapper; +import org.elasticsearch.index.mapper.core.NumberFieldMapper; +import org.elasticsearch.plugin.mapper.MapperSizePlugin; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; -import org.junit.Before; - +import org.elasticsearch.test.InternalSettingsPlugin; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.instanceOf; import org.apache.lucene.index.IndexableField; public class SizeMappingTests extends ESSingleNodeTestCase { - - IndexService indexService; - MapperService mapperService; - DocumentMapperParser parser; - - @Before - public void before() { - indexService = createIndex("test"); - IndicesModule indices = newTestIndicesModule(Collections.emptyMap(), - Collections.singletonMap(SizeFieldMapper.NAME, new SizeFieldMapper.TypeParser()) - ); - mapperService = new MapperService(indexService.getIndexSettings(), indexService.analysisService(), indexService.similarityService(), indices.getMapperRegistry(), indexService::newQueryShardContext); - parser = mapperService.documentMapperParser(); + @Override + protected Collection> getPlugins() { + return pluginList(MapperSizePlugin.class, InternalSettingsPlugin.class); } public void testSizeEnabled() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_size").field("enabled", true).endObject() - .endObject().endObject().string(); - DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping)); + 
IndexService service = createIndex("test", Settings.EMPTY, "type", "_size", "enabled=true"); + DocumentMapper docMapper = service.mapperService().documentMapper("type"); BytesReference source = XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .endObject() - .bytes(); + .startObject() + .field("field", "value") + .endObject() + .bytes(); ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source)); boolean stored = false; @@ -84,47 +74,82 @@ public class SizeMappingTests extends ESSingleNodeTestCase { } public void testSizeDisabled() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_size").field("enabled", false).endObject() - .endObject().endObject().string(); - DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping)); + IndexService service = createIndex("test", Settings.EMPTY, "type", "_size", "enabled=false"); + DocumentMapper docMapper = service.mapperService().documentMapper("type"); BytesReference source = XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .endObject() - .bytes(); + .startObject() + .field("field", "value") + .endObject() + .bytes(); ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source)); assertThat(doc.rootDoc().getField("_size"), nullValue()); } public void testSizeNotSet() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .endObject().endObject().string(); - DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping)); + IndexService service = createIndex("test", Settings.EMPTY, "type"); + DocumentMapper docMapper = service.mapperService().documentMapper("type"); BytesReference source = XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .endObject() - .bytes(); + .startObject() + .field("field", "value") + .endObject() + .bytes(); ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source)); assertThat(doc.rootDoc().getField("_size"), nullValue()); } public void testThatDisablingWorksWhenMerging() throws Exception { - String enabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_size").field("enabled", true).endObject() - .endObject().endObject().string(); - DocumentMapper enabledMapper = mapperService.merge("type", new CompressedXContent(enabledMapping), MapperService.MergeReason.MAPPING_UPDATE, false); + IndexService service = createIndex("test", Settings.EMPTY, "type", "_size", "enabled=true"); + DocumentMapper docMapper = service.mapperService().documentMapper("type"); + assertThat(docMapper.metadataMapper(SizeFieldMapper.class).enabled(), is(true)); String disabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_size").field("enabled", false).endObject() - .endObject().endObject().string(); - DocumentMapper disabledMapper = mapperService.merge("type", new CompressedXContent(disabledMapping), MapperService.MergeReason.MAPPING_UPDATE, false); + .startObject("_size").field("enabled", false).endObject() + .endObject().endObject().string(); + docMapper = service.mapperService().merge("type", new CompressedXContent(disabledMapping), + MapperService.MergeReason.MAPPING_UPDATE, false); - assertThat(disabledMapper.metadataMapper(SizeFieldMapper.class).enabled(), is(false)); + assertThat(docMapper.metadataMapper(SizeFieldMapper.class).enabled(), is(false)); } + 
+ public void testBWCMapper() throws Exception { + { + // IntPoint && docvalues=true for V_5_0_0_alpha5 + IndexService service = createIndex("foo", Settings.EMPTY, "bar", "_size", "enabled=true"); + DocumentMapper docMapper = service.mapperService().documentMapper("bar"); + SizeFieldMapper mapper = docMapper.metadataMapper(SizeFieldMapper.class); + assertThat(mapper.enabled(), is(true)); + MappedFieldType ft = mapper.fieldType(); + assertThat(ft.hasDocValues(), is(true)); + assertThat(mapper.fieldType(), instanceOf(NumberFieldMapper.NumberFieldType.class)); + } + + { + // IntPoint with docvalues=false if version > V_5_0_0_alpha2 && version < V_5_0_0_beta1 + IndexService service = createIndex("foo2", + Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_0_0_alpha4.id).build(), + "bar", "_size", "enabled=true"); + DocumentMapper docMapper = service.mapperService().documentMapper("bar"); + SizeFieldMapper mapper = docMapper.metadataMapper(SizeFieldMapper.class); + assertThat(mapper.enabled(), is(true)); + assertThat(mapper.fieldType().hasDocValues(), is(false)); + assertThat(mapper.fieldType(), instanceOf(NumberFieldMapper.NumberFieldType.class)); + } + + { + // LegacyIntField with docvalues=false if version < V_5_0_0_alpha2 + IndexService service = createIndex("foo3", + Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_0_0_alpha1.id).build(), + "bar", "_size", "enabled=true"); + DocumentMapper docMapper = service.mapperService().documentMapper("bar"); + SizeFieldMapper mapper = docMapper.metadataMapper(SizeFieldMapper.class); + assertThat(mapper.enabled(), is(true)); + assertThat(mapper.fieldType().hasDocValues(), is(false)); + assertThat(mapper.fieldType(), instanceOf(LegacyNumberFieldMapper.NumberFieldType.class)); + } + } + } diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/AzureRepositoryModule.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/AzureRepositoryModule.java deleted file mode 100644 index 92a87ff8ee1..00000000000 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/AzureRepositoryModule.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.cloud.azure; - -import org.elasticsearch.cloud.azure.storage.AzureStorageService; -import org.elasticsearch.cloud.azure.storage.AzureStorageServiceImpl; -import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.logging.ESLogger; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; - -/** - * Azure Module - * - *
<ul> - * <li>If needed this module will bind azure repository service by default - * to AzureStorageServiceImpl.</li> - * </ul>
- * - * @see org.elasticsearch.cloud.azure.storage.AzureStorageServiceImpl - */ -public class AzureRepositoryModule extends AbstractModule { - protected final ESLogger logger; - - // pkg private so it is settable by tests - static Class storageServiceImpl = AzureStorageServiceImpl.class; - - @Inject - public AzureRepositoryModule(Settings settings) { - this.logger = Loggers.getLogger(getClass(), settings); - } - - @Override - protected void configure() { - // If we have settings for azure repository, let's start the azure storage service - logger.debug("starting azure repository service"); - bind(AzureStorageService.class).to(storageServiceImpl).asEagerSingleton(); - } -} diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobStore.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobStore.java index 2809b8588f1..b77cebfa2f2 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobStore.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/blobstore/AzureBlobStore.java @@ -23,17 +23,16 @@ import com.microsoft.azure.storage.LocationMode; import com.microsoft.azure.storage.StorageException; import org.elasticsearch.cloud.azure.storage.AzureStorageService; import org.elasticsearch.cloud.azure.storage.AzureStorageService.Storage; +import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.repositories.RepositoryName; -import org.elasticsearch.repositories.RepositorySettings; +import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.URISyntaxException; @@ -52,16 +51,15 @@ public class AzureBlobStore extends AbstractComponent implements BlobStore { private final String container; private final String repositoryName; - @Inject - public AzureBlobStore(RepositoryName name, Settings settings, RepositorySettings repositorySettings, + public AzureBlobStore(RepositoryMetaData metadata, Settings settings, AzureStorageService client) throws URISyntaxException, StorageException { super(settings); - this.client = client.start(); - this.container = getValue(repositorySettings, Repository.CONTAINER_SETTING, Storage.CONTAINER_SETTING); - this.repositoryName = name.getName(); - this.accountName = getValue(repositorySettings, Repository.ACCOUNT_SETTING, Storage.ACCOUNT_SETTING); + this.client = client; + this.container = getValue(metadata.settings(), settings, Repository.CONTAINER_SETTING, Storage.CONTAINER_SETTING); + this.repositoryName = metadata.name(); + this.accountName = getValue(metadata.settings(), settings, Repository.ACCOUNT_SETTING, Storage.ACCOUNT_SETTING); - String modeStr = getValue(repositorySettings, Repository.LOCATION_MODE_SETTING, Storage.LOCATION_MODE_SETTING); + String modeStr = getValue(metadata.settings(), settings, Repository.LOCATION_MODE_SETTING, Storage.LOCATION_MODE_SETTING); if (Strings.hasLength(modeStr)) { this.locMode = LocationMode.valueOf(modeStr.toUpperCase(Locale.ROOT)); } else { @@ -127,7 +125,7 @@ public class AzureBlobStore extends 
AbstractComponent implements BlobStore { this.client.deleteBlob(this.accountName, this.locMode, container, blob); } - public InputStream getInputStream(String container, String blob) throws URISyntaxException, StorageException + public InputStream getInputStream(String container, String blob) throws URISyntaxException, StorageException, IOException { return this.client.getInputStream(this.accountName, this.locMode, container, blob); } diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java index 13db36aeb5a..3e854ab9c70 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageService.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; +import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.URISyntaxException; @@ -75,7 +76,7 @@ public interface AzureStorageService { void deleteBlob(String account, LocationMode mode, String container, String blob) throws URISyntaxException, StorageException; InputStream getInputStream(String account, LocationMode mode, String container, String blob) - throws URISyntaxException, StorageException; + throws URISyntaxException, StorageException, IOException; OutputStream getOutputStream(String account, LocationMode mode, String container, String blob) throws URISyntaxException, StorageException; @@ -85,6 +86,4 @@ public interface AzureStorageService { void moveBlob(String account, LocationMode mode, String container, String sourceBlob, String targetBlob) throws URISyntaxException, StorageException; - - AzureStorageService start(); } diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java index 09dbc6520a1..17ff0780a50 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceImpl.java @@ -33,6 +33,7 @@ import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.support.PlainBlobMetaData; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; @@ -45,15 +46,13 @@ import java.net.URISyntaxException; import java.util.HashMap; import java.util.Map; -public class AzureStorageServiceImpl extends AbstractLifecycleComponent - implements AzureStorageService { +public class AzureStorageServiceImpl extends AbstractComponent implements AzureStorageService { final AzureStorageSettings primaryStorageSettings; final Map secondariesStorageSettings; final Map clients; - @Inject public AzureStorageServiceImpl(Settings settings) { super(settings); @@ -62,6 +61,20 @@ public class AzureStorageServiceImpl extends AbstractLifecycleComponent(); + + logger.debug("starting azure 
storage client instance"); + + // We register the primary client if any + if (primaryStorageSettings != null) { + logger.debug("registering primary client for account [{}]", primaryStorageSettings.getAccount()); + createClient(primaryStorageSettings); + } + + // We register all secondary clients + for (Map.Entry azureStorageSettingsEntry : secondariesStorageSettings.entrySet()) { + logger.debug("registering secondary client for account [{}]", azureStorageSettingsEntry.getKey()); + createClient(azureStorageSettingsEntry.getValue()); + } } void createClient(AzureStorageSettings azureStorageSettings) { @@ -302,32 +315,4 @@ public class AzureStorageServiceImpl extends AbstractLifecycleComponent done", container, sourceBlob, targetBlob); } } - - @Override - protected void doStart() throws ElasticsearchException { - logger.debug("starting azure storage client instance"); - - // We register the primary client if any - if (primaryStorageSettings != null) { - logger.debug("registering primary client for account [{}]", primaryStorageSettings.getAccount()); - createClient(primaryStorageSettings); - } - - // We register all secondary clients - for (Map.Entry azureStorageSettingsEntry : secondariesStorageSettings.entrySet()) { - logger.debug("registering secondary client for account [{}]", azureStorageSettingsEntry.getKey()); - createClient(azureStorageSettingsEntry.getValue()); - } - } - - @Override - protected void doStop() throws ElasticsearchException { - logger.debug("stopping azure storage client instance"); - // We should stop all clients but it does sound like CloudBlobClient has - // any shutdown method... - } - - @Override - protected void doClose() throws ElasticsearchException { - } } diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettings.java b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettings.java index f64ffed4a03..6d1ed0c1049 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettings.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/cloud/azure/storage/AzureStorageSettings.java @@ -25,7 +25,6 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.repositories.RepositorySettings; import java.util.ArrayList; import java.util.Collections; @@ -172,20 +171,21 @@ public final class AzureStorageSettings { return Collections.unmodifiableMap(secondaries); } - public static T getValue(RepositorySettings repositorySettings, + public static T getValue(Settings repositorySettings, + Settings globalSettings, Setting repositorySetting, Setting repositoriesSetting) { - if (repositorySetting.exists(repositorySettings.settings())) { - return repositorySetting.get(repositorySettings.settings()); + if (repositorySetting.exists(repositorySettings)) { + return repositorySetting.get(repositorySettings); } else { - return repositoriesSetting.get(repositorySettings.globalSettings()); + return repositoriesSetting.get(globalSettings); } } - public static Setting getEffectiveSetting(RepositorySettings repositorySettings, + public static Setting getEffectiveSetting(Settings repositorySettings, Setting repositorySetting, Setting repositoriesSetting) { - if (repositorySetting.exists(repositorySettings.settings())) { + if (repositorySetting.exists(repositorySettings)) 
{ return repositorySetting; } else { return repositoriesSetting; diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/plugin/repository/azure/AzureRepositoryPlugin.java b/plugins/repository-azure/src/main/java/org/elasticsearch/plugin/repository/azure/AzureRepositoryPlugin.java index b04b613df21..fcd7bf96b2c 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/plugin/repository/azure/AzureRepositoryPlugin.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/plugin/repository/azure/AzureRepositoryPlugin.java @@ -19,44 +19,37 @@ package org.elasticsearch.plugin.repository.azure; -import org.elasticsearch.cloud.azure.AzureRepositoryModule; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; + import org.elasticsearch.cloud.azure.storage.AzureStorageService; -import org.elasticsearch.common.inject.Module; +import org.elasticsearch.cloud.azure.storage.AzureStorageServiceImpl; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository; +import org.elasticsearch.env.Environment; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.repositories.RepositoriesModule; +import org.elasticsearch.plugins.RepositoryPlugin; +import org.elasticsearch.repositories.Repository; import org.elasticsearch.repositories.azure.AzureRepository; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.List; - /** - * + * A plugin to add a repository type that writes to and from the Azure cloud storage service. 
*/ -public class AzureRepositoryPlugin extends Plugin { +public class AzureRepositoryPlugin extends Plugin implements RepositoryPlugin { - private final Settings settings; - protected final ESLogger logger = Loggers.getLogger(AzureRepositoryPlugin.class); - - public AzureRepositoryPlugin(Settings settings) { - this.settings = settings; - logger.trace("starting azure repository plugin..."); + // overridable for tests + protected AzureStorageService createStorageService(Settings settings) { + return new AzureStorageServiceImpl(settings); } @Override - public Collection nodeModules() { - return Collections.singletonList((Module) new AzureRepositoryModule(settings)); - } - - public void onModule(RepositoriesModule module) { - logger.debug("registering repository type [{}]", AzureRepository.TYPE); - module.registerRepository(AzureRepository.TYPE, AzureRepository.class, BlobStoreIndexShardRepository.class); + public Map getRepositories(Environment env) { + return Collections.singletonMap(AzureRepository.TYPE, + (metadata) -> new AzureRepository(metadata, env, createStorageService(env.settings()))); } @Override diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java index 4d3459cdcd4..4b4f7d6ae8e 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java @@ -19,34 +19,33 @@ package org.elasticsearch.repositories.azure; -import com.microsoft.azure.storage.LocationMode; -import com.microsoft.azure.storage.StorageException; -import org.elasticsearch.cloud.azure.blobstore.AzureBlobStore; -import org.elasticsearch.cloud.azure.storage.AzureStorageService.Storage; -import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.snapshots.SnapshotId; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.blobstore.BlobPath; -import org.elasticsearch.common.blobstore.BlobStore; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.Property; -import org.elasticsearch.common.settings.SettingsException; -import org.elasticsearch.common.unit.ByteSizeUnit; -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.index.snapshots.IndexShardRepository; -import org.elasticsearch.repositories.RepositoryName; -import org.elasticsearch.repositories.RepositorySettings; -import org.elasticsearch.repositories.RepositoryVerificationException; -import org.elasticsearch.repositories.blobstore.BlobStoreRepository; -import org.elasticsearch.snapshots.SnapshotCreationException; - import java.io.IOException; import java.net.URISyntaxException; import java.util.List; import java.util.Locale; import java.util.function.Function; +import com.microsoft.azure.storage.LocationMode; +import com.microsoft.azure.storage.StorageException; +import org.elasticsearch.cloud.azure.blobstore.AzureBlobStore; +import org.elasticsearch.cloud.azure.storage.AzureStorageService; +import org.elasticsearch.cloud.azure.storage.AzureStorageService.Storage; +import org.elasticsearch.cluster.metadata.MetaData; +import org.elasticsearch.cluster.metadata.RepositoryMetaData; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.blobstore.BlobPath; +import 
org.elasticsearch.common.blobstore.BlobStore; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Setting.Property; +import org.elasticsearch.common.settings.SettingsException; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.env.Environment; +import org.elasticsearch.repositories.RepositoryVerificationException; +import org.elasticsearch.repositories.blobstore.BlobStoreRepository; +import org.elasticsearch.snapshots.SnapshotCreationException; +import org.elasticsearch.snapshots.SnapshotId; + import static org.elasticsearch.cloud.azure.storage.AzureStorageSettings.getEffectiveSetting; import static org.elasticsearch.cloud.azure.storage.AzureStorageSettings.getValue; @@ -65,7 +64,7 @@ public class AzureRepository extends BlobStoreRepository { private static final ByteSizeValue MAX_CHUNK_SIZE = new ByteSizeValue(64, ByteSizeUnit.MB); - public final static String TYPE = "azure"; + public static final String TYPE = "azure"; public static final class Repository { public static final Setting ACCOUNT_SETTING = Setting.simpleString("account", Property.NodeScope); @@ -84,25 +83,22 @@ public class AzureRepository extends BlobStoreRepository { private final boolean compress; private final boolean readonly; - @Inject - public AzureRepository(RepositoryName name, RepositorySettings repositorySettings, - IndexShardRepository indexShardRepository, - AzureBlobStore azureBlobStore) throws IOException, URISyntaxException, StorageException { - super(name.getName(), repositorySettings, indexShardRepository); + public AzureRepository(RepositoryMetaData metadata, Environment environment, AzureStorageService storageService) + throws IOException, URISyntaxException, StorageException { + super(metadata, environment.settings()); - String container = getValue(repositorySettings, Repository.CONTAINER_SETTING, Storage.CONTAINER_SETTING); - - this.blobStore = azureBlobStore; - ByteSizeValue configuredChunkSize = getValue(repositorySettings, Repository.CHUNK_SIZE_SETTING, Storage.CHUNK_SIZE_SETTING); + blobStore = new AzureBlobStore(metadata, environment.settings(), storageService); + String container = getValue(metadata.settings(), settings, Repository.CONTAINER_SETTING, Storage.CONTAINER_SETTING); + ByteSizeValue configuredChunkSize = getValue(metadata.settings(), settings, Repository.CHUNK_SIZE_SETTING, Storage.CHUNK_SIZE_SETTING); if (configuredChunkSize.getMb() > MAX_CHUNK_SIZE.getMb()) { - Setting setting = getEffectiveSetting(repositorySettings, Repository.CHUNK_SIZE_SETTING, Storage.CHUNK_SIZE_SETTING); + Setting setting = getEffectiveSetting(metadata.settings(), Repository.CHUNK_SIZE_SETTING, Storage.CHUNK_SIZE_SETTING); throw new SettingsException("[" + setting.getKey() + "] must not exceed [" + MAX_CHUNK_SIZE + "] but is set to [" + configuredChunkSize + "]."); } else { this.chunkSize = configuredChunkSize; } - this.compress = getValue(repositorySettings, Repository.COMPRESS_SETTING, Storage.COMPRESS_SETTING); - String modeStr = getValue(repositorySettings, Repository.LOCATION_MODE_SETTING, Storage.LOCATION_MODE_SETTING); + this.compress = getValue(metadata.settings(), settings, Repository.COMPRESS_SETTING, Storage.COMPRESS_SETTING); + String modeStr = getValue(metadata.settings(), settings, Repository.LOCATION_MODE_SETTING, Storage.LOCATION_MODE_SETTING); if (Strings.hasLength(modeStr)) { LocationMode locationMode = LocationMode.valueOf(modeStr.toUpperCase(Locale.ROOT)); readonly = 
locationMode == LocationMode.SECONDARY_ONLY; @@ -110,7 +106,7 @@ public class AzureRepository extends BlobStoreRepository { readonly = false; } - String basePath = getValue(repositorySettings, Repository.BASE_PATH_SETTING, Storage.BASE_PATH_SETTING); + String basePath = getValue(metadata.settings(), settings, Repository.BASE_PATH_SETTING, Storage.BASE_PATH_SETTING); if (Strings.hasLength(basePath)) { // Remove starting / if any @@ -157,16 +153,16 @@ public class AzureRepository extends BlobStoreRepository { } @Override - public void initializeSnapshot(SnapshotId snapshotId, List indices, MetaData metaData) { + public void initializeSnapshot(SnapshotId snapshotId, List indices, MetaData clusterMetadata) { try { if (!blobStore.doesContainerExist(blobStore.container())) { logger.debug("container [{}] does not exist. Creating...", blobStore.container()); blobStore.createContainer(blobStore.container()); } - super.initializeSnapshot(snapshotId, indices, metaData); + super.initializeSnapshot(snapshotId, indices, clusterMetadata); } catch (StorageException | URISyntaxException e) { logger.warn("can not initialize container [{}]: [{}]", blobStore.container(), e.getMessage()); - throw new SnapshotCreationException(repositoryName, snapshotId, e); + throw new SnapshotCreationException(getMetadata().name(), snapshotId, e); } } @@ -180,14 +176,14 @@ public class AzureRepository extends BlobStoreRepository { } } catch (StorageException | URISyntaxException e) { logger.warn("can not initialize container [{}]: [{}]", blobStore.container(), e.getMessage()); - throw new RepositoryVerificationException(repositoryName, "can not initialize container " + blobStore.container(), e); + throw new RepositoryVerificationException(getMetadata().name(), "can not initialize container " + blobStore.container(), e); } } return super.startVerification(); } @Override - public boolean readOnly() { + public boolean isReadOnly() { return readonly; } } diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureRepositoryServiceIntegTestCase.java b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureRepositoryServiceIntegTestCase.java index 82af834cd35..dd8251c3d2b 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureRepositoryServiceIntegTestCase.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/AbstractAzureRepositoryServiceIntegTestCase.java @@ -40,9 +40,12 @@ import java.util.Collection; public abstract class AbstractAzureRepositoryServiceIntegTestCase extends AbstractAzureIntegTestCase { - public static class TestPlugin extends Plugin { - public void onModule(AzureRepositoryModule azureRepositoryModule) { - AzureRepositoryModule.storageServiceImpl = AzureStorageServiceMock.class; + private static final AzureStorageService storageService = new AzureStorageServiceMock(); + + public static class TestPlugin extends AzureRepositoryPlugin { + @Override + protected AzureStorageService createStorageService(Settings settings) { + return storageService; } } @@ -78,7 +81,7 @@ public abstract class AbstractAzureRepositoryServiceIntegTestCase extends Abstra @Override protected Collection> nodePlugins() { - return pluginList(AzureRepositoryPlugin.class, TestPlugin.class, MockFSIndexStore.TestPlugin.class); + return pluginList(TestPlugin.class, MockFSIndexStore.TestPlugin.class); } @Override @@ -104,7 +107,6 @@ public abstract class AbstractAzureRepositoryServiceIntegTestCase extends Abstra public void 
cleanRepositoryFiles(String path) throws StorageException, URISyntaxException { String container = internalCluster().getInstance(Settings.class).get("repositories.azure.container"); logger.info("--> remove blobs in container [{}]", container); - AzureStorageService client = internalCluster().getInstance(AzureStorageService.class); - client.deleteFiles(null, LocationMode.PRIMARY_ONLY, container, path); + storageService.deleteFiles(null, LocationMode.PRIMARY_ONLY, container, path); } } diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceMock.java b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceMock.java index 8160c560325..4ed365b5ac8 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceMock.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceMock.java @@ -25,12 +25,15 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.support.PlainBlobMetaData; import org.elasticsearch.common.collect.MapBuilder; +import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; +import java.io.FileNotFoundException; +import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.URISyntaxException; @@ -41,14 +44,12 @@ import java.util.concurrent.ConcurrentHashMap; /** * In memory storage for unit tests */ -public class AzureStorageServiceMock extends AbstractLifecycleComponent - implements AzureStorageService { +public class AzureStorageServiceMock extends AbstractComponent implements AzureStorageService { protected Map blobs = new ConcurrentHashMap<>(); - @Inject - public AzureStorageServiceMock(Settings settings) { - super(settings); + public AzureStorageServiceMock() { + super(Settings.EMPTY); } @Override @@ -79,7 +80,10 @@ public class AzureStorageServiceMock extends AbstractLifecycleComponent blobNames) throws IOException { - blobStore.deleteBlobs(buildKeys(blobNames)); - } - @Override public void move(String sourceBlobName, String targetBlobName) throws IOException { blobStore.moveBlob(buildKey(sourceBlobName), buildKey(targetBlobName)); @@ -103,12 +93,4 @@ public class GoogleCloudStorageBlobContainer extends AbstractBlobContainer { assert blobName != null; return path + blobName; } - - protected Set buildKeys(Collection blobNames) { - Set keys = new HashSet<>(); - if (blobNames != null) { - keys.addAll(blobNames.stream().map(this::buildKey).collect(Collectors.toList())); - } - return keys; - } } diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/plugin/repository/gcs/GoogleCloudStoragePlugin.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/plugin/repository/gcs/GoogleCloudStoragePlugin.java index 77d8e23c9e5..c0fa38e8b57 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/plugin/repository/gcs/GoogleCloudStoragePlugin.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/plugin/repository/gcs/GoogleCloudStoragePlugin.java @@ -19,6 +19,12 @@ package org.elasticsearch.plugin.repository.gcs; +import java.security.AccessController; +import 
java.security.PrivilegedAction; +import java.util.Collection; +import java.util.Collections; +import java.util.Map; + import com.google.api.client.auth.oauth2.TokenRequest; import com.google.api.client.auth.oauth2.TokenResponse; import com.google.api.client.googleapis.json.GoogleJsonError; @@ -35,17 +41,16 @@ import com.google.api.services.storage.model.Objects; import com.google.api.services.storage.model.StorageObject; import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.inject.Module; -import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.RepositoryPlugin; import org.elasticsearch.repositories.RepositoriesModule; +import org.elasticsearch.repositories.Repository; import org.elasticsearch.repositories.gcs.GoogleCloudStorageRepository; +import org.elasticsearch.repositories.gcs.GoogleCloudStorageService; -import java.security.AccessController; -import java.security.PrivilegedAction; -import java.util.Collection; -import java.util.Collections; - -public class GoogleCloudStoragePlugin extends Plugin { +public class GoogleCloudStoragePlugin extends Plugin implements RepositoryPlugin { public static final String NAME = "repository-gcs"; @@ -109,13 +114,14 @@ public class GoogleCloudStoragePlugin extends Plugin { }); } - @Override - public Collection nodeModules() { - return Collections.singletonList(new GoogleCloudStorageModule()); + // overridable for tests + protected GoogleCloudStorageService createStorageService(Environment environment) { + return new GoogleCloudStorageService.InternalGoogleCloudStorageService(environment); } - public void onModule(RepositoriesModule repositoriesModule) { - repositoriesModule.registerRepository(GoogleCloudStorageRepository.TYPE, - GoogleCloudStorageRepository.class, BlobStoreIndexShardRepository.class); + @Override + public Map getRepositories(Environment env) { + return Collections.singletonMap(GoogleCloudStorageRepository.TYPE, + (metadata) -> new GoogleCloudStorageRepository(metadata, env, createStorageService(env))); } } diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java index 3616e18e83d..f7b74d5a4f8 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java @@ -20,20 +20,18 @@ package org.elasticsearch.repositories.gcs; import com.google.api.services.storage.Storage; +import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.blobstore.gcs.GoogleCloudStorageBlobStore; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.index.snapshots.IndexShardRepository; +import org.elasticsearch.env.Environment; import org.elasticsearch.plugin.repository.gcs.GoogleCloudStoragePlugin; import 
org.elasticsearch.repositories.RepositoryException; -import org.elasticsearch.repositories.RepositoryName; -import org.elasticsearch.repositories.RepositorySettings; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import java.util.function.Function; @@ -73,17 +71,15 @@ public class GoogleCloudStorageRepository extends BlobStoreRepository { private final BlobPath basePath; private final GoogleCloudStorageBlobStore blobStore; - @Inject - public GoogleCloudStorageRepository(RepositoryName repositoryName, RepositorySettings repositorySettings, - IndexShardRepository indexShardRepository, + public GoogleCloudStorageRepository(RepositoryMetaData metadata, Environment environment, GoogleCloudStorageService storageService) throws Exception { - super(repositoryName.getName(), repositorySettings, indexShardRepository); + super(metadata, environment.settings()); - String bucket = get(BUCKET, repositoryName, repositorySettings); - String application = get(APPLICATION_NAME, repositoryName, repositorySettings); - String serviceAccount = get(SERVICE_ACCOUNT, repositoryName, repositorySettings); + String bucket = get(BUCKET, metadata); + String application = get(APPLICATION_NAME, metadata); + String serviceAccount = get(SERVICE_ACCOUNT, metadata); - String basePath = BASE_PATH.get(repositorySettings.settings()); + String basePath = BASE_PATH.get(metadata.settings()); if (Strings.hasLength(basePath)) { BlobPath path = new BlobPath(); for (String elem : basePath.split("/")) { @@ -97,18 +93,18 @@ public class GoogleCloudStorageRepository extends BlobStoreRepository { TimeValue connectTimeout = null; TimeValue readTimeout = null; - TimeValue timeout = HTTP_CONNECT_TIMEOUT.get(repositorySettings.settings()); + TimeValue timeout = HTTP_CONNECT_TIMEOUT.get(metadata.settings()); if ((timeout != null) && (timeout.millis() != NO_TIMEOUT.millis())) { connectTimeout = timeout; } - timeout = HTTP_READ_TIMEOUT.get(repositorySettings.settings()); + timeout = HTTP_READ_TIMEOUT.get(metadata.settings()); if ((timeout != null) && (timeout.millis() != NO_TIMEOUT.millis())) { readTimeout = timeout; } - this.compress = get(COMPRESS, repositoryName, repositorySettings); - this.chunkSize = get(CHUNK_SIZE, repositoryName, repositorySettings); + this.compress = get(COMPRESS, metadata); + this.chunkSize = get(CHUNK_SIZE, metadata); logger.debug("using bucket [{}], base_path [{}], chunk_size [{}], compress [{}], application [{}]", bucket, basePath, chunkSize, compress, application); @@ -141,13 +137,13 @@ public class GoogleCloudStorageRepository extends BlobStoreRepository { /** * Get a given setting from the repository settings, throwing a {@link RepositoryException} if the setting does not exist or is empty. 
*/ - static T get(Setting setting, RepositoryName name, RepositorySettings repositorySettings) { - T value = setting.get(repositorySettings.settings()); + static T get(Setting setting, RepositoryMetaData metadata) { + T value = setting.get(metadata.settings()); if (value == null) { - throw new RepositoryException(name.getName(), "Setting [" + setting.getKey() + "] is not defined for repository"); + throw new RepositoryException(metadata.name(), "Setting [" + setting.getKey() + "] is not defined for repository"); } if ((value instanceof String) && (Strings.hasText((String) value)) == false) { - throw new RepositoryException(name.getName(), "Setting [" + setting.getKey() + "] is empty for repository"); + throw new RepositoryException(metadata.name(), "Setting [" + setting.getKey() + "] is empty for repository"); } return value; } diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java index 098ce5f1504..55e7813de42 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java @@ -67,9 +67,8 @@ public interface GoogleCloudStorageService { private final Environment environment; - @Inject - public InternalGoogleCloudStorageService(Settings settings, Environment environment) { - super(settings); + public InternalGoogleCloudStorageService(Environment environment) { + super(environment.settings()); this.environment = environment; } diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java index c5b57ba6cd6..f9548e7e2ea 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java @@ -21,18 +21,16 @@ package org.elasticsearch.repositories.gcs; import com.google.api.services.storage.Storage; import org.elasticsearch.common.blobstore.gcs.MockHttpTransport; -import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.plugin.repository.gcs.GoogleCloudStorageModule; +import org.elasticsearch.env.Environment; import org.elasticsearch.plugin.repository.gcs.GoogleCloudStoragePlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.ESBlobStoreRepositoryIntegTestCase; import org.junit.BeforeClass; import java.util.Collection; -import java.util.Collections; import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -68,25 +66,13 @@ public class GoogleCloudStorageBlobStoreRepositoryTests extends ESBlobStoreRepos } public static class MockGoogleCloudStoragePlugin extends GoogleCloudStoragePlugin { - - public MockGoogleCloudStoragePlugin() { - } - @Override - public Collection nodeModules() { - return Collections.singletonList(new MockGoogleCloudStorageModule()); - } - } - - public static class MockGoogleCloudStorageModule extends 
GoogleCloudStorageModule { - @Override - protected void configure() { - bind(GoogleCloudStorageService.class).to(MockGoogleCloudStorageService.class).asEagerSingleton(); + protected GoogleCloudStorageService createStorageService(Environment environment) { + return new MockGoogleCloudStorageService(); } } public static class MockGoogleCloudStorageService implements GoogleCloudStorageService { - @Override public Storage createClient(String serviceAccount, String application, TimeValue connectTimeout, TimeValue readTimeout) throws Exception { diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 8fc9e50d7f3..544e0407738 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -117,46 +117,46 @@ thirdPartyAudit.excludes = [ 'com.sun.jersey.spi.container.servlet.ServletContainer', 'com.sun.jersey.spi.inject.Injectable', 'com.sun.jersey.spi.inject.InjectableProvider', - 'io.netty.bootstrap.Bootstrap', - 'io.netty.bootstrap.ChannelFactory', - 'io.netty.bootstrap.ServerBootstrap', - 'io.netty.buffer.ByteBuf', - 'io.netty.buffer.Unpooled', - 'io.netty.channel.Channel', - 'io.netty.channel.ChannelFuture', - 'io.netty.channel.ChannelFutureListener', - 'io.netty.channel.ChannelHandler', - 'io.netty.channel.ChannelHandlerContext', - 'io.netty.channel.ChannelInboundHandlerAdapter', - 'io.netty.channel.ChannelInitializer', - 'io.netty.channel.ChannelPipeline', - 'io.netty.channel.EventLoopGroup', - 'io.netty.channel.SimpleChannelInboundHandler', - 'io.netty.channel.group.ChannelGroup', - 'io.netty.channel.group.ChannelGroupFuture', - 'io.netty.channel.group.DefaultChannelGroup', - 'io.netty.channel.nio.NioEventLoopGroup', - 'io.netty.channel.socket.SocketChannel', - 'io.netty.channel.socket.nio.NioServerSocketChannel', - 'io.netty.channel.socket.nio.NioSocketChannel', - 'io.netty.handler.codec.http.DefaultFullHttpRequest', - 'io.netty.handler.codec.http.DefaultFullHttpResponse', - 'io.netty.handler.codec.http.DefaultHttpResponse', - 'io.netty.handler.codec.http.HttpContent', - 'io.netty.handler.codec.http.HttpHeaders', - 'io.netty.handler.codec.http.HttpMethod', - 'io.netty.handler.codec.http.HttpRequest', - 'io.netty.handler.codec.http.HttpRequestDecoder', - 'io.netty.handler.codec.http.HttpRequestEncoder', - 'io.netty.handler.codec.http.HttpResponseEncoder', - 'io.netty.handler.codec.http.HttpResponseStatus', - 'io.netty.handler.codec.http.HttpVersion', - 'io.netty.handler.codec.http.QueryStringDecoder', - 'io.netty.handler.codec.string.StringEncoder', - 'io.netty.handler.ssl.SslHandler', - 'io.netty.handler.stream.ChunkedStream', - 'io.netty.handler.stream.ChunkedWriteHandler', - 'io.netty.util.concurrent.GlobalEventExecutor', + 'io.netty.bootstrap.Bootstrap', + 'io.netty.bootstrap.ChannelFactory', + 'io.netty.bootstrap.ServerBootstrap', + 'io.netty.buffer.ByteBuf', + 'io.netty.buffer.Unpooled', + 'io.netty.channel.Channel', + 'io.netty.channel.ChannelFuture', + 'io.netty.channel.ChannelFutureListener', + 'io.netty.channel.ChannelHandler', + 'io.netty.channel.ChannelHandlerContext', + 'io.netty.channel.ChannelInboundHandlerAdapter', + 'io.netty.channel.ChannelInitializer', + 'io.netty.channel.ChannelPipeline', + 'io.netty.channel.EventLoopGroup', + 'io.netty.channel.SimpleChannelInboundHandler', + 'io.netty.channel.group.ChannelGroup', + 'io.netty.channel.group.ChannelGroupFuture', + 'io.netty.channel.group.DefaultChannelGroup', + 'io.netty.channel.nio.NioEventLoopGroup', + 'io.netty.channel.socket.SocketChannel', + 
'io.netty.channel.socket.nio.NioServerSocketChannel', + 'io.netty.channel.socket.nio.NioSocketChannel', + 'io.netty.handler.codec.http.DefaultFullHttpRequest', + 'io.netty.handler.codec.http.DefaultFullHttpResponse', + 'io.netty.handler.codec.http.DefaultHttpResponse', + 'io.netty.handler.codec.http.HttpContent', + 'io.netty.handler.codec.http.HttpHeaders', + 'io.netty.handler.codec.http.HttpMethod', + 'io.netty.handler.codec.http.HttpRequest', + 'io.netty.handler.codec.http.HttpRequestDecoder', + 'io.netty.handler.codec.http.HttpRequestEncoder', + 'io.netty.handler.codec.http.HttpResponseEncoder', + 'io.netty.handler.codec.http.HttpResponseStatus', + 'io.netty.handler.codec.http.HttpVersion', + 'io.netty.handler.codec.http.QueryStringDecoder', + 'io.netty.handler.codec.string.StringEncoder', + 'io.netty.handler.ssl.SslHandler', + 'io.netty.handler.stream.ChunkedStream', + 'io.netty.handler.stream.ChunkedWriteHandler', + 'io.netty.util.concurrent.GlobalEventExecutor', 'javax.ws.rs.core.Context', 'javax.ws.rs.core.MediaType', 'javax.ws.rs.core.MultivaluedMap', diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java index 6ba726e2b24..a22178315f7 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsBlobContainer.java @@ -131,7 +131,7 @@ final class HdfsBlobContainer extends AbstractBlobContainer { } @Override - public Map listBlobsByPrefix(final @Nullable String prefix) throws IOException { + public Map listBlobsByPrefix(@Nullable final String prefix) throws IOException { FileStatus[] files = store.execute(new Operation() { @Override public FileStatus[] run(FileContext fileContext) throws IOException { diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsPlugin.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsPlugin.java index ab539aeea1c..d4af26c3bbc 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsPlugin.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsPlugin.java @@ -23,15 +23,18 @@ import java.nio.file.Files; import java.nio.file.Path; import java.security.AccessController; import java.security.PrivilegedAction; +import java.util.Collections; +import java.util.Map; import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.SuppressForbidden; -import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository; +import org.elasticsearch.env.Environment; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.RepositoryPlugin; import org.elasticsearch.repositories.RepositoriesModule; +import org.elasticsearch.repositories.Repository; -// Code -public final class HdfsPlugin extends Plugin { +public final class HdfsPlugin extends Plugin implements RepositoryPlugin { // initialize some problematic classes with elevated privileges static { @@ -84,7 +87,8 @@ public final class HdfsPlugin extends Plugin { return null; } - public void onModule(RepositoriesModule repositoriesModule) { - repositoriesModule.registerRepository("hdfs", HdfsRepository.class, BlobStoreIndexShardRepository.class); + @Override + public Map getRepositories(Environment env) { + return Collections.singletonMap("hdfs", (metadata) -> new 
HdfsRepository(metadata, env)); } } diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java index 1e8e267bd41..b111a5d0d0a 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java @@ -37,22 +37,20 @@ import org.apache.hadoop.fs.FileContext; import org.apache.hadoop.fs.UnsupportedFileSystemException; import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.SpecialPermission; +import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; -import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.index.snapshots.IndexShardRepository; -import org.elasticsearch.repositories.RepositoryName; -import org.elasticsearch.repositories.RepositorySettings; +import org.elasticsearch.env.Environment; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; public final class HdfsRepository extends BlobStoreRepository { private final BlobPath basePath = BlobPath.cleanPath(); - private final RepositorySettings repositorySettings; private final ByteSizeValue chunkSize; private final boolean compress; @@ -62,18 +60,16 @@ public final class HdfsRepository extends BlobStoreRepository { // TODO: why 100KB? 
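With the "hdfs" factory registered above, creating a repository of this type is unchanged from the user's perspective. For reference, a test-style registration against the Java client of this era might look like the sketch below; the repository name, uri, and path values are illustrative, not taken from the patch.

    // Illustrative registration of an HDFS snapshot repository. The "uri" and
    // "path" settings are the ones HdfsRepository validates in doStart() below;
    // the uri must not carry a path component, the path goes in its own setting.
    assertAcked(client().admin().cluster().preparePutRepository("my_hdfs_repo")
        .setType("hdfs")
        .setSettings(Settings.builder()
            .put("uri", "hdfs://namenode:8020")
            .put("path", "elasticsearch/repositories")
            .put("buffer_size", "128kb"))); // optional, defaults to the 100kb constant below
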
private static final ByteSizeValue DEFAULT_BUFFER_SIZE = new ByteSizeValue(100, ByteSizeUnit.KB); - @Inject - public HdfsRepository(RepositoryName name, RepositorySettings repositorySettings, IndexShardRepository indexShardRepository) throws IOException { - super(name.getName(), repositorySettings, indexShardRepository); - this.repositorySettings = repositorySettings; + public HdfsRepository(RepositoryMetaData metadata, Environment environment) throws IOException { + super(metadata, environment.settings()); - this.chunkSize = repositorySettings.settings().getAsBytesSize("chunk_size", null); - this.compress = repositorySettings.settings().getAsBoolean("compress", false); + this.chunkSize = metadata.settings().getAsBytesSize("chunk_size", null); + this.compress = metadata.settings().getAsBoolean("compress", false); } @Override protected void doStart() { - String uriSetting = repositorySettings.settings().get("uri"); + String uriSetting = getMetadata().settings().get("uri"); if (Strings.hasText(uriSetting) == false) { throw new IllegalArgumentException("No 'uri' defined for hdfs snapshot/restore"); } @@ -87,13 +83,13 @@ public final class HdfsRepository extends BlobStoreRepository { "Use 'path' option to specify a path [%s], not the uri [%s] for hdfs snapshot/restore", uri.getPath(), uriSetting)); } - String pathSetting = repositorySettings.settings().get("path"); + String pathSetting = getMetadata().settings().get("path"); // get configuration if (pathSetting == null) { throw new IllegalArgumentException("No 'path' defined for hdfs snapshot/restore"); } - int bufferSize = repositorySettings.settings().getAsBytesSize("buffer_size", DEFAULT_BUFFER_SIZE).bytesAsInt(); + int bufferSize = getMetadata().settings().getAsBytesSize("buffer_size", DEFAULT_BUFFER_SIZE).bytesAsInt(); try { // initialize our filecontext @@ -104,7 +100,7 @@ public final class HdfsRepository extends BlobStoreRepository { FileContext fileContext = AccessController.doPrivileged(new PrivilegedAction() { @Override public FileContext run() { - return createContext(uri, repositorySettings); + return createContext(uri, getMetadata().settings()); } }); blobStore = new HdfsBlobStore(fileContext, pathSetting, bufferSize); @@ -117,12 +113,12 @@ public final class HdfsRepository extends BlobStoreRepository { // create hadoop filecontext @SuppressForbidden(reason = "lesser of two evils (the other being a bunch of JNI/classloader nightmares)") - private static FileContext createContext(URI uri, RepositorySettings repositorySettings) { - Configuration cfg = new Configuration(repositorySettings.settings().getAsBoolean("load_defaults", true)); + private static FileContext createContext(URI uri, Settings repositorySettings) { + Configuration cfg = new Configuration(repositorySettings.getAsBoolean("load_defaults", true)); cfg.setClassLoader(HdfsRepository.class.getClassLoader()); cfg.reloadConfiguration(); - Map map = repositorySettings.settings().getByPrefix("conf.").getAsMap(); + Map map = repositorySettings.getByPrefix("conf.").getAsMap(); for (Entry entry : map.entrySet()) { cfg.set(entry.getKey(), entry.getValue()); } diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml index 7db9a429230..20019686d3d 100644 --- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml +++ 
b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yaml @@ -23,11 +23,6 @@ number_of_shards: 1 number_of_replicas: 1 - # Wait for yellow - - do: - cluster.health: - wait_for_status: yellow - # Create snapshot - do: snapshot.create: diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsS3Service.java b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsS3Service.java index 15cd55c823d..43de8a3ba27 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsS3Service.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/AwsS3Service.java @@ -31,7 +31,7 @@ import java.util.function.Function; /** * */ -public interface AwsS3Service extends LifecycleComponent { +public interface AwsS3Service extends LifecycleComponent { // Global AWS settings (shared between discovery-ec2 and repository-s3) // Each setting starting with `cloud.aws` also exists in discovery-ec2 project. Don't forget to update @@ -155,5 +155,5 @@ public interface AwsS3Service extends LifecycleComponent { } AmazonS3 client(String endpoint, Protocol protocol, String region, String account, String key, Integer maxRetries, - boolean useThrottleRetries); + boolean useThrottleRetries, Boolean pathStyleAccess); } diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java index 287973a3a90..36d383d865c 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java @@ -31,6 +31,7 @@ import com.amazonaws.http.IdleConnectionReaper; import com.amazonaws.internal.StaticCredentialsProvider; import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.AmazonS3Client; +import com.amazonaws.services.s3.S3ClientOptions; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Tuple; @@ -44,21 +45,20 @@ import java.util.Map; /** * */ -public class InternalAwsS3Service extends AbstractLifecycleComponent implements AwsS3Service { +public class InternalAwsS3Service extends AbstractLifecycleComponent implements AwsS3Service { /** * (acceskey, endpoint) -> client */ private Map, AmazonS3Client> clients = new HashMap<>(); - @Inject public InternalAwsS3Service(Settings settings) { super(settings); } @Override public synchronized AmazonS3 client(String endpoint, Protocol protocol, String region, String account, String key, Integer maxRetries, - boolean useThrottleRetries) { + boolean useThrottleRetries, Boolean pathStyleAccess) { if (Strings.isNullOrEmpty(endpoint)) { // We need to set the endpoint based on the region if (region != null) { @@ -70,11 +70,11 @@ public class InternalAwsS3Service extends AbstractLifecycleComponent clientDescriptor = new Tuple<>(endpoint, account); AmazonS3Client client = clients.get(clientDescriptor); if (client != null) { @@ -131,6 +131,11 @@ public class InternalAwsS3Service extends AbstractLifecycleComponent { + try { + blobStore.client().getObjectMetadata(blobStore.bucket(), buildKey(blobName)); + return true; + } catch (AmazonS3Exception e) { + return false; + } + }); } catch (AmazonS3Exception e) { return false; - } catch (Throwable e) { + } catch (Exception e) { throw new BlobStoreException("failed to check if blob exists", e); } } @@ -176,4 
+183,19 @@ public class S3BlobContainer extends AbstractBlobContainer { return keyPath + blobName; } + /** + * Executes a {@link PrivilegedExceptionAction} with privileges enabled. + */ + T doPrivileged(PrivilegedExceptionAction operation) throws IOException { + SecurityManager sm = System.getSecurityManager(); + if (sm != null) { + sm.checkPermission(new SpecialPermission()); + } + try { + return AccessController.doPrivileged(operation); + } catch (PrivilegedActionException e) { + throw (IOException) e.getException(); + } + } } diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/plugin/repository/s3/S3RepositoryPlugin.java b/plugins/repository-s3/src/main/java/org/elasticsearch/plugin/repository/s3/S3RepositoryPlugin.java index 7d52a067b65..f549b5c3949 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/plugin/repository/s3/S3RepositoryPlugin.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/plugin/repository/s3/S3RepositoryPlugin.java @@ -19,30 +19,28 @@ package org.elasticsearch.plugin.repository.s3; -import org.elasticsearch.SpecialPermission; -import org.elasticsearch.cloud.aws.AwsS3Service; -import org.elasticsearch.cloud.aws.S3Module; -import org.elasticsearch.common.component.LifecycleComponent; -import org.elasticsearch.common.inject.Module; -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.SettingsModule; -import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.repositories.RepositoriesModule; -import org.elasticsearch.repositories.s3.S3Repository; - import java.security.AccessController; import java.security.PrivilegedAction; -import java.util.ArrayList; import java.util.Arrays; -import java.util.Collection; import java.util.Collections; import java.util.List; +import java.util.Map; + +import org.elasticsearch.SpecialPermission; +import org.elasticsearch.cloud.aws.AwsS3Service; +import org.elasticsearch.cloud.aws.InternalAwsS3Service; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.RepositoryPlugin; +import org.elasticsearch.repositories.Repository; +import org.elasticsearch.repositories.s3.S3Repository; /** - * + * A plugin to add a repository type that writes to and from AWS S3.
*/ -public class S3RepositoryPlugin extends Plugin { +public class S3RepositoryPlugin extends Plugin implements RepositoryPlugin { // ClientConfiguration clinit has some classloader problems // TODO: fix that @@ -64,26 +62,28 @@ public class S3RepositoryPlugin extends Plugin { }); } - @Override - public Collection nodeModules() { - Collection modules = new ArrayList<>(); - modules.add(new S3Module()); - return modules; + // overridable for tests + protected AwsS3Service createStorageService(Settings settings) { + return new InternalAwsS3Service(settings); } @Override - @SuppressWarnings("rawtypes") // Supertype declaration has raw types - public Collection> nodeServices() { - return Collections.>singleton(S3Module.getS3ServiceImpl()); + public Map getRepositories(Environment env) { + return Collections.singletonMap(S3Repository.TYPE, + (metadata) -> new S3Repository(metadata, env.settings(), createStorageService(env.settings()))); } - public void onModule(RepositoriesModule repositoriesModule) { - repositoriesModule.registerRepository(S3Repository.TYPE, S3Repository.class, BlobStoreIndexShardRepository.class); + @Override + public List getSettingsFilter() { + return Arrays.asList( + S3Repository.Repository.KEY_SETTING.getKey(), + S3Repository.Repository.SECRET_SETTING.getKey()); } @Override public List> getSettings() { - return Arrays.asList( // Register global cloud aws settings: cloud.aws (might have been registered in ec2 plugin) + return Arrays.asList( + // Register global cloud aws settings: cloud.aws (might have been registered in ec2 plugin) AwsS3Service.KEY_SETTING, AwsS3Service.SECRET_SETTING, AwsS3Service.PROTOCOL_SETTING, @@ -122,21 +122,6 @@ public class S3RepositoryPlugin extends Plugin { S3Repository.Repositories.CANNED_ACL_SETTING, S3Repository.Repositories.BASE_PATH_SETTING, S3Repository.Repositories.USE_THROTTLE_RETRIES_SETTING, - - // Register S3 single repository settings - S3Repository.Repository.KEY_SETTING, - S3Repository.Repository.SECRET_SETTING, - S3Repository.Repository.BUCKET_SETTING, - S3Repository.Repository.ENDPOINT_SETTING, - S3Repository.Repository.PROTOCOL_SETTING, - S3Repository.Repository.REGION_SETTING, - S3Repository.Repository.SERVER_SIDE_ENCRYPTION_SETTING, - S3Repository.Repository.BUFFER_SIZE_SETTING, - S3Repository.Repository.MAX_RETRIES_SETTING, - S3Repository.Repository.CHUNK_SIZE_SETTING, - S3Repository.Repository.COMPRESS_SETTING, - S3Repository.Repository.STORAGE_CLASS_SETTING, - S3Repository.Repository.CANNED_ACL_SETTING, - S3Repository.Repository.BASE_PATH_SETTING); + S3Repository.Repositories.PATH_STYLE_ACCESS_SETTING); } } diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java index fe184a06aef..cc91173d954 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java @@ -19,23 +19,22 @@ package org.elasticsearch.repositories.s3; +import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.ClientConfiguration; import com.amazonaws.Protocol; import org.elasticsearch.cloud.aws.AwsS3Service; import org.elasticsearch.cloud.aws.AwsS3Service.CLOUD_S3; import org.elasticsearch.cloud.aws.blobstore.S3BlobStore; +import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.blobstore.BlobPath; import 
org.elasticsearch.common.blobstore.BlobStore; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.index.snapshots.IndexShardRepository; import org.elasticsearch.repositories.RepositoryException; -import org.elasticsearch.repositories.RepositoryName; -import org.elasticsearch.repositories.RepositorySettings; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import java.io.IOException; @@ -57,7 +56,7 @@ import java.util.function.Function; */ public class S3Repository extends BlobStoreRepository { - public final static String TYPE = "s3"; + public static final String TYPE = "s3"; /** * Global S3 repositories settings. Starting with: repositories.s3 @@ -146,6 +145,15 @@ public class S3Repository extends BlobStoreRepository { * repositories.s3.base_path: Specifies the path within bucket to repository data. Defaults to root directory. */ Setting BASE_PATH_SETTING = Setting.simpleString("repositories.s3.base_path", Property.NodeScope); + /** + * repositories.s3.path_style_access: When set to true, configures the client to use path-style access for all requests. + * Amazon S3 supports virtual-hosted-style and path-style access in all Regions. The path-style syntax, however, + * requires that you use the region-specific endpoint when attempting to access a bucket. + * The default behaviour is to detect which access style to use based on the configured endpoint (an IP will result + * in path-style access) and the bucket being accessed (some buckets are not valid DNS names). Setting this flag + * will result in path-style access being used for all requests.
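The new flag is deliberately tri-state: AwsS3Service#client(...) now takes a Boolean, and only an explicitly configured value overrides the SDK's own endpoint- and bucket-based detection. Judging from the S3ClientOptions import added to InternalAwsS3Service above, the wiring is presumably along these lines; a sketch against the contemporary AWS SDK, not a verbatim excerpt of the patch:

    import com.amazonaws.ClientConfiguration;
    import com.amazonaws.auth.AWSCredentialsProvider;
    import com.amazonaws.services.s3.AmazonS3Client;
    import com.amazonaws.services.s3.S3ClientOptions;

    class PathStyleAccessSketch {
        // null means "not configured": leave the SDK's default detection alone.
        static AmazonS3Client newClient(AWSCredentialsProvider credentials, ClientConfiguration config,
                                        Boolean pathStyleAccess) {
            AmazonS3Client client = new AmazonS3Client(credentials, config);
            if (pathStyleAccess != null) {
                client.setS3ClientOptions(new S3ClientOptions().withPathStyleAccess(pathStyleAccess));
            }
            return client;
        }
    }
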
+ */ + Setting PATH_STYLE_ACCESS_SETTING = Setting.boolSetting("repositories.s3.path_style_access", false, Property.NodeScope); } /** @@ -157,84 +165,87 @@ public class S3Repository extends BlobStoreRepository { * access_key * @see Repositories#KEY_SETTING */ - Setting KEY_SETTING = Setting.simpleString("access_key", Property.NodeScope, Property.Filtered); + Setting KEY_SETTING = Setting.simpleString("access_key"); /** * secret_key * @see Repositories#SECRET_SETTING */ - Setting SECRET_SETTING = Setting.simpleString("secret_key", Property.NodeScope, Property.Filtered); + Setting SECRET_SETTING = Setting.simpleString("secret_key"); /** * bucket * @see Repositories#BUCKET_SETTING */ - Setting BUCKET_SETTING = Setting.simpleString("bucket", Property.NodeScope); + Setting BUCKET_SETTING = Setting.simpleString("bucket"); /** * endpoint * @see Repositories#ENDPOINT_SETTING */ - Setting ENDPOINT_SETTING = Setting.simpleString("endpoint", Property.NodeScope); + Setting ENDPOINT_SETTING = Setting.simpleString("endpoint"); /** * protocol * @see Repositories#PROTOCOL_SETTING */ - Setting PROTOCOL_SETTING = - new Setting<>("protocol", "https", s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), Property.NodeScope); + Setting PROTOCOL_SETTING = new Setting<>("protocol", "https", s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT))); /** * region * @see Repositories#REGION_SETTING */ - Setting REGION_SETTING = new Setting<>("region", "", s -> s.toLowerCase(Locale.ROOT), Property.NodeScope); + Setting REGION_SETTING = new Setting<>("region", "", s -> s.toLowerCase(Locale.ROOT)); /** * server_side_encryption * @see Repositories#SERVER_SIDE_ENCRYPTION_SETTING */ - Setting SERVER_SIDE_ENCRYPTION_SETTING = - Setting.boolSetting("server_side_encryption", false, Property.NodeScope); + Setting SERVER_SIDE_ENCRYPTION_SETTING = Setting.boolSetting("server_side_encryption", false); /** * buffer_size * @see Repositories#BUFFER_SIZE_SETTING */ Setting BUFFER_SIZE_SETTING = Setting.byteSizeSetting("buffer_size", new ByteSizeValue(100, ByteSizeUnit.MB), - new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(5, ByteSizeUnit.TB), Property.NodeScope); + new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(5, ByteSizeUnit.TB)); /** * max_retries * @see Repositories#MAX_RETRIES_SETTING */ - Setting MAX_RETRIES_SETTING = Setting.intSetting("max_retries", 3, Property.NodeScope); + Setting MAX_RETRIES_SETTING = Setting.intSetting("max_retries", 3); /** * use_throttle_retries * @see Repositories#USE_THROTTLE_RETRIES_SETTING */ Setting USE_THROTTLE_RETRIES_SETTING = Setting.boolSetting("use_throttle_retries", - ClientConfiguration.DEFAULT_THROTTLE_RETRIES, Property.NodeScope); + ClientConfiguration.DEFAULT_THROTTLE_RETRIES); /** * chunk_size * @see Repositories#CHUNK_SIZE_SETTING */ Setting CHUNK_SIZE_SETTING = Setting.byteSizeSetting("chunk_size", new ByteSizeValue(1, ByteSizeUnit.GB), - new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(5, ByteSizeUnit.TB), Property.NodeScope); + new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(5, ByteSizeUnit.TB)); /** * compress * @see Repositories#COMPRESS_SETTING */ - Setting COMPRESS_SETTING = Setting.boolSetting("compress", false, Property.NodeScope); + Setting COMPRESS_SETTING = Setting.boolSetting("compress", false); /** * storage_class * @see Repositories#STORAGE_CLASS_SETTING */ - Setting STORAGE_CLASS_SETTING = Setting.simpleString("storage_class", Property.NodeScope); + Setting STORAGE_CLASS_SETTING = Setting.simpleString("storage_class"); /** * canned_acl * @see 
Repositories#CANNED_ACL_SETTING */ - Setting CANNED_ACL_SETTING = Setting.simpleString("canned_acl", Property.NodeScope); + Setting CANNED_ACL_SETTING = Setting.simpleString("canned_acl"); /** * base_path * @see Repositories#BASE_PATH_SETTING */ - Setting BASE_PATH_SETTING = Setting.simpleString("base_path", Property.NodeScope); + Setting BASE_PATH_SETTING = Setting.simpleString("base_path"); + /** + * path_style_access + * @see Repositories#PATH_STYLE_ACCESS_SETTING + */ + Setting PATH_STYLE_ACCESS_SETTING = Setting.boolSetting("path_style_access", false); } private final S3BlobStore blobStore; @@ -246,60 +257,63 @@ public class S3Repository extends BlobStoreRepository { private boolean compress; /** - * Constructs new shared file system repository - * - * @param name repository name - * @param repositorySettings repository settings - * @param indexShardRepository index shard repository - * @param s3Service S3 service + * Constructs an s3 backed repository */ - @Inject - public S3Repository(RepositoryName name, RepositorySettings repositorySettings, IndexShardRepository indexShardRepository, AwsS3Service s3Service) throws IOException { - super(name.getName(), repositorySettings, indexShardRepository); + public S3Repository(RepositoryMetaData metadata, Settings settings, AwsS3Service s3Service) throws IOException { + super(metadata, settings); - String bucket = getValue(repositorySettings, Repository.BUCKET_SETTING, Repositories.BUCKET_SETTING); + String bucket = getValue(metadata.settings(), settings, Repository.BUCKET_SETTING, Repositories.BUCKET_SETTING); if (bucket == null) { - throw new RepositoryException(name.name(), "No bucket defined for s3 gateway"); + throw new RepositoryException(metadata.name(), "No bucket defined for s3 gateway"); } - String endpoint = getValue(repositorySettings, Repository.ENDPOINT_SETTING, Repositories.ENDPOINT_SETTING); - Protocol protocol = getValue(repositorySettings, Repository.PROTOCOL_SETTING, Repositories.PROTOCOL_SETTING); - String region = getValue(repositorySettings, Repository.REGION_SETTING, Repositories.REGION_SETTING); + String endpoint = getValue(metadata.settings(), settings, Repository.ENDPOINT_SETTING, Repositories.ENDPOINT_SETTING); + Protocol protocol = getValue(metadata.settings(), settings, Repository.PROTOCOL_SETTING, Repositories.PROTOCOL_SETTING); + String region = getValue(metadata.settings(), settings, Repository.REGION_SETTING, Repositories.REGION_SETTING); // If no region is defined either in region, repositories.s3.region, cloud.aws.s3.region or cloud.aws.region // we fallback to Default bucket - null if (Strings.isEmpty(region)) { region = null; } - boolean serverSideEncryption = getValue(repositorySettings, Repository.SERVER_SIDE_ENCRYPTION_SETTING, Repositories.SERVER_SIDE_ENCRYPTION_SETTING); - ByteSizeValue bufferSize = getValue(repositorySettings, Repository.BUFFER_SIZE_SETTING, Repositories.BUFFER_SIZE_SETTING); - Integer maxRetries = getValue(repositorySettings, Repository.MAX_RETRIES_SETTING, Repositories.MAX_RETRIES_SETTING); - boolean useThrottleRetries = getValue(repositorySettings, Repository.USE_THROTTLE_RETRIES_SETTING, Repositories.USE_THROTTLE_RETRIES_SETTING); - this.chunkSize = getValue(repositorySettings, Repository.CHUNK_SIZE_SETTING, Repositories.CHUNK_SIZE_SETTING); - this.compress = getValue(repositorySettings, Repository.COMPRESS_SETTING, Repositories.COMPRESS_SETTING); + boolean serverSideEncryption = getValue(metadata.settings(), settings, Repository.SERVER_SIDE_ENCRYPTION_SETTING, 
Repositories.SERVER_SIDE_ENCRYPTION_SETTING); + ByteSizeValue bufferSize = getValue(metadata.settings(), settings, Repository.BUFFER_SIZE_SETTING, Repositories.BUFFER_SIZE_SETTING); + Integer maxRetries = getValue(metadata.settings(), settings, Repository.MAX_RETRIES_SETTING, Repositories.MAX_RETRIES_SETTING); + boolean useThrottleRetries = getValue(metadata.settings(), settings, Repository.USE_THROTTLE_RETRIES_SETTING, Repositories.USE_THROTTLE_RETRIES_SETTING); + this.chunkSize = getValue(metadata.settings(), settings, Repository.CHUNK_SIZE_SETTING, Repositories.CHUNK_SIZE_SETTING); + this.compress = getValue(metadata.settings(), settings, Repository.COMPRESS_SETTING, Repositories.COMPRESS_SETTING); // We make sure that chunkSize is bigger or equal than/to bufferSize if (this.chunkSize.getBytes() < bufferSize.getBytes()) { - throw new RepositoryException(name.name(), Repository.CHUNK_SIZE_SETTING.getKey() + " (" + this.chunkSize + + throw new RepositoryException(metadata.name(), Repository.CHUNK_SIZE_SETTING.getKey() + " (" + this.chunkSize + ") can't be lower than " + Repository.BUFFER_SIZE_SETTING.getKey() + " (" + bufferSize + ")."); } // Parse and validate the user's S3 Storage Class setting - String storageClass = getValue(repositorySettings, Repository.STORAGE_CLASS_SETTING, Repositories.STORAGE_CLASS_SETTING); - String cannedACL = getValue(repositorySettings, Repository.CANNED_ACL_SETTING, Repositories.CANNED_ACL_SETTING); + String storageClass = getValue(metadata.settings(), settings, Repository.STORAGE_CLASS_SETTING, Repositories.STORAGE_CLASS_SETTING); + String cannedACL = getValue(metadata.settings(), settings, Repository.CANNED_ACL_SETTING, Repositories.CANNED_ACL_SETTING); + + // If the user defined a path style access setting, we rely on it otherwise we use the default + // value set by the SDK + Boolean pathStyleAccess = null; + if (Repository.PATH_STYLE_ACCESS_SETTING.exists(metadata.settings()) || + Repositories.PATH_STYLE_ACCESS_SETTING.exists(settings)) { + pathStyleAccess = getValue(metadata.settings(), settings, Repository.PATH_STYLE_ACCESS_SETTING, Repositories.PATH_STYLE_ACCESS_SETTING); + } logger.debug("using bucket [{}], region [{}], endpoint [{}], protocol [{}], chunk_size [{}], server_side_encryption [{}], " + - "buffer_size [{}], max_retries [{}], use_throttle_retries [{}], cannedACL [{}], storageClass [{}]", + "buffer_size [{}], max_retries [{}], use_throttle_retries [{}], cannedACL [{}], storageClass [{}], path_style_access [{}]", bucket, region, endpoint, protocol, chunkSize, serverSideEncryption, bufferSize, maxRetries, useThrottleRetries, cannedACL, - storageClass); + storageClass, pathStyleAccess); - String key = getValue(repositorySettings, Repository.KEY_SETTING, Repositories.KEY_SETTING); - String secret = getValue(repositorySettings, Repository.SECRET_SETTING, Repositories.SECRET_SETTING); + String key = getValue(metadata.settings(), settings, Repository.KEY_SETTING, Repositories.KEY_SETTING); + String secret = getValue(metadata.settings(), settings, Repository.SECRET_SETTING, Repositories.SECRET_SETTING); - blobStore = new S3BlobStore(settings, s3Service.client(endpoint, protocol, region, key, secret, maxRetries, useThrottleRetries), + blobStore = new S3BlobStore(settings, + s3Service.client(endpoint, protocol, region, key, secret, maxRetries, useThrottleRetries, pathStyleAccess), bucket, region, serverSideEncryption, bufferSize, maxRetries, cannedACL, storageClass); - String basePath = getValue(repositorySettings, Repository.BASE_PATH_SETTING, 
Repositories.BASE_PATH_SETTING); if (Strings.hasLength(basePath)) { BlobPath path = new BlobPath(); for(String elem : basePath.split("/")) { @@ -311,9 +325,6 @@ public class S3Repository extends BlobStoreRepository { } } - /** - * {@inheritDoc} - */ @Override protected BlobStore blobStore() { return blobStore; @@ -324,29 +335,24 @@ public class S3Repository extends BlobStoreRepository { return basePath; } - /** - * {@inheritDoc} - */ @Override protected boolean isCompress() { return compress; } - /** - * {@inheritDoc} - */ @Override protected ByteSizeValue chunkSize() { return chunkSize; } - public static T getValue(RepositorySettings repositorySettings, + public static T getValue(Settings repositorySettings, + Settings globalSettings, Setting repositorySetting, Setting repositoriesSetting) { - if (repositorySetting.exists(repositorySettings.settings())) { - return repositorySetting.get(repositorySettings.settings()); + if (repositorySetting.exists(repositorySettings)) { + return repositorySetting.get(repositorySettings); } else { - return repositoriesSetting.get(repositorySettings.globalSettings()); + return repositoriesSetting.get(globalSettings); } } } diff --git a/plugins/repository-s3/src/main/plugin-metadata/plugin-security.policy b/plugins/repository-s3/src/main/plugin-metadata/plugin-security.policy index e5f26c3e9d1..1f09cada2e5 100644 --- a/plugins/repository-s3/src/main/plugin-metadata/plugin-security.policy +++ b/plugins/repository-s3/src/main/plugin-metadata/plugin-security.policy @@ -22,4 +22,16 @@ grant { // TODO: get these fixed in aws sdk permission java.lang.RuntimePermission "accessDeclaredMembers"; permission java.lang.RuntimePermission "getClassLoader"; + // Needed because of problems in AmazonS3Client: + // When no region is set on an AmazonS3Client instance, the + // AWS SDK loads all known partitions from a JSON file and + // uses Jackson's ObjectMapper for that: this one, in + // version 2.5.3 with the default binding options, tries + // to suppress access checks of ctor/field/method and thus + // requires this special permission. AWS must be fixed to + // use Jackson correctly and have the correct modifiers + // on bound classes.
+ // TODO: get these fixed in aws sdk + // See https://github.com/aws/aws-sdk-java/issues/766 + permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; }; diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AbstractAwsTestCase.java b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AbstractAwsTestCase.java index 9d1768db58b..b7dc7cc525d 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AbstractAwsTestCase.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/AbstractAwsTestCase.java @@ -69,6 +69,6 @@ public abstract class AbstractAwsTestCase extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return pluginList(S3RepositoryPlugin.class); + return pluginList(TestAwsS3Service.TestPlugin.class); } } diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/RepositoryS3SettingsTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/RepositoryS3SettingsTests.java deleted file mode 100644 index 4cb8e4d3abb..00000000000 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/RepositoryS3SettingsTests.java +++ /dev/null @@ -1,353 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.cloud.aws; - -import com.amazonaws.Protocol; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.ByteSizeUnit; -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.repositories.RepositoryException; -import org.elasticsearch.repositories.RepositoryName; -import org.elasticsearch.repositories.RepositorySettings; -import org.elasticsearch.repositories.s3.S3Repository; -import org.elasticsearch.test.ESTestCase; - -import java.io.IOException; - -import static org.elasticsearch.repositories.s3.S3Repository.Repositories; -import static org.elasticsearch.repositories.s3.S3Repository.Repository; -import static org.elasticsearch.repositories.s3.S3Repository.getValue; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.isEmptyString; - -public class RepositoryS3SettingsTests extends ESTestCase { - - private static final Settings AWS = Settings.builder() - .put(AwsS3Service.KEY_SETTING.getKey(), "global-key") - .put(AwsS3Service.SECRET_SETTING.getKey(), "global-secret") - .put(AwsS3Service.PROTOCOL_SETTING.getKey(), "https") - .put(AwsS3Service.PROXY_HOST_SETTING.getKey(), "global-proxy-host") - .put(AwsS3Service.PROXY_PORT_SETTING.getKey(), 10000) - .put(AwsS3Service.PROXY_USERNAME_SETTING.getKey(), "global-proxy-username") - .put(AwsS3Service.PROXY_PASSWORD_SETTING.getKey(), "global-proxy-password") - .put(AwsS3Service.SIGNER_SETTING.getKey(), "global-signer") - .put(AwsS3Service.REGION_SETTING.getKey(), "global-region") - .build(); - - private static final Settings S3 = Settings.builder() - .put(AwsS3Service.CLOUD_S3.KEY_SETTING.getKey(), "s3-key") - .put(AwsS3Service.CLOUD_S3.SECRET_SETTING.getKey(), "s3-secret") - .put(AwsS3Service.CLOUD_S3.PROTOCOL_SETTING.getKey(), "http") - .put(AwsS3Service.CLOUD_S3.PROXY_HOST_SETTING.getKey(), "s3-proxy-host") - .put(AwsS3Service.CLOUD_S3.PROXY_PORT_SETTING.getKey(), 20000) - .put(AwsS3Service.CLOUD_S3.PROXY_USERNAME_SETTING.getKey(), "s3-proxy-username") - .put(AwsS3Service.CLOUD_S3.PROXY_PASSWORD_SETTING.getKey(), "s3-proxy-password") - .put(AwsS3Service.CLOUD_S3.SIGNER_SETTING.getKey(), "s3-signer") - .put(AwsS3Service.CLOUD_S3.REGION_SETTING.getKey(), "s3-region") - .put(AwsS3Service.CLOUD_S3.ENDPOINT_SETTING.getKey(), "s3-endpoint") - .build(); - - private static final Settings REPOSITORIES = Settings.builder() - .put(Repositories.KEY_SETTING.getKey(), "repositories-key") - .put(Repositories.SECRET_SETTING.getKey(), "repositories-secret") - .put(Repositories.BUCKET_SETTING.getKey(), "repositories-bucket") - .put(Repositories.PROTOCOL_SETTING.getKey(), "https") - .put(Repositories.REGION_SETTING.getKey(), "repositories-region") - .put(Repositories.ENDPOINT_SETTING.getKey(), "repositories-endpoint") - .put(Repositories.SERVER_SIDE_ENCRYPTION_SETTING.getKey(), true) - .put(Repositories.BUFFER_SIZE_SETTING.getKey(), "6mb") - .put(Repositories.MAX_RETRIES_SETTING.getKey(), 4) - .put(Repositories.CHUNK_SIZE_SETTING.getKey(), "110mb") - .put(Repositories.COMPRESS_SETTING.getKey(), true) - .put(Repositories.STORAGE_CLASS_SETTING.getKey(), "repositories-class") - .put(Repositories.CANNED_ACL_SETTING.getKey(), "repositories-acl") - .put(Repositories.BASE_PATH_SETTING.getKey(), "repositories-basepath") - .build(); - - private static final Settings REPOSITORY = Settings.builder() - .put(Repository.KEY_SETTING.getKey(), "repository-key") - .put(Repository.SECRET_SETTING.getKey(), 
"repository-secret") - .put(Repository.BUCKET_SETTING.getKey(), "repository-bucket") - .put(Repository.PROTOCOL_SETTING.getKey(), "https") - .put(Repository.REGION_SETTING.getKey(), "repository-region") - .put(Repository.ENDPOINT_SETTING.getKey(), "repository-endpoint") - .put(Repository.SERVER_SIDE_ENCRYPTION_SETTING.getKey(), false) - .put(Repository.BUFFER_SIZE_SETTING.getKey(), "7mb") - .put(Repository.MAX_RETRIES_SETTING.getKey(), 5) - .put(Repository.CHUNK_SIZE_SETTING.getKey(), "120mb") - .put(Repository.COMPRESS_SETTING.getKey(), false) - .put(Repository.STORAGE_CLASS_SETTING.getKey(), "repository-class") - .put(Repository.CANNED_ACL_SETTING.getKey(), "repository-acl") - .put(Repository.BASE_PATH_SETTING.getKey(), "repository-basepath") - .build(); - - /** - * We test when only cloud.aws settings are set - */ - public void testRepositorySettingsGlobalOnly() { - Settings nodeSettings = buildSettings(AWS); - RepositorySettings repositorySettings = new RepositorySettings(nodeSettings, Settings.EMPTY); - assertThat(getValue(repositorySettings, Repository.KEY_SETTING, Repositories.KEY_SETTING), is("global-key")); - assertThat(getValue(repositorySettings, Repository.SECRET_SETTING, Repositories.SECRET_SETTING), is("global-secret")); - assertThat(getValue(repositorySettings, Repository.BUCKET_SETTING, Repositories.BUCKET_SETTING), isEmptyString()); - assertThat(getValue(repositorySettings, Repository.PROTOCOL_SETTING, Repositories.PROTOCOL_SETTING), is(Protocol.HTTPS)); - assertThat(getValue(repositorySettings, Repository.REGION_SETTING, Repositories.REGION_SETTING), is("global-region")); - assertThat(getValue(repositorySettings, Repository.ENDPOINT_SETTING, Repositories.ENDPOINT_SETTING), isEmptyString()); - assertThat(AwsS3Service.CLOUD_S3.PROXY_HOST_SETTING.get(nodeSettings), is("global-proxy-host")); - assertThat(AwsS3Service.CLOUD_S3.PROXY_PORT_SETTING.get(nodeSettings), is(10000)); - assertThat(AwsS3Service.CLOUD_S3.PROXY_USERNAME_SETTING.get(nodeSettings), is("global-proxy-username")); - assertThat(AwsS3Service.CLOUD_S3.PROXY_PASSWORD_SETTING.get(nodeSettings), is("global-proxy-password")); - assertThat(AwsS3Service.CLOUD_S3.SIGNER_SETTING.get(nodeSettings), is("global-signer")); - assertThat(getValue(repositorySettings, Repository.SERVER_SIDE_ENCRYPTION_SETTING, Repositories.SERVER_SIDE_ENCRYPTION_SETTING), - is(false)); - assertThat(getValue(repositorySettings, Repository.BUFFER_SIZE_SETTING, Repositories.BUFFER_SIZE_SETTING).getMb(), is(100L)); - assertThat(getValue(repositorySettings, Repository.MAX_RETRIES_SETTING, Repositories.MAX_RETRIES_SETTING), is(3)); - assertThat(getValue(repositorySettings, Repository.CHUNK_SIZE_SETTING, Repositories.CHUNK_SIZE_SETTING).getGb(), is(1L)); - assertThat(getValue(repositorySettings, Repository.COMPRESS_SETTING, Repositories.COMPRESS_SETTING), is(false)); - assertThat(getValue(repositorySettings, Repository.STORAGE_CLASS_SETTING, Repositories.STORAGE_CLASS_SETTING), isEmptyString()); - assertThat(getValue(repositorySettings, Repository.CANNED_ACL_SETTING, Repositories.CANNED_ACL_SETTING), isEmptyString()); - assertThat(getValue(repositorySettings, Repository.BASE_PATH_SETTING, Repositories.BASE_PATH_SETTING), isEmptyString()); - } - - /** - * We test when cloud.aws settings are overloaded by cloud.aws.s3 settings - */ - public void testRepositorySettingsGlobalOverloadedByS3() { - Settings nodeSettings = buildSettings(AWS, S3); - RepositorySettings repositorySettings = new RepositorySettings(nodeSettings, Settings.EMPTY); - 
assertThat(getValue(repositorySettings, Repository.KEY_SETTING, Repositories.KEY_SETTING), is("s3-key")); - assertThat(getValue(repositorySettings, Repository.SECRET_SETTING, Repositories.SECRET_SETTING), is("s3-secret")); - assertThat(getValue(repositorySettings, Repository.BUCKET_SETTING, Repositories.BUCKET_SETTING), isEmptyString()); - assertThat(getValue(repositorySettings, Repository.PROTOCOL_SETTING, Repositories.PROTOCOL_SETTING), is(Protocol.HTTP)); - assertThat(getValue(repositorySettings, Repository.REGION_SETTING, Repositories.REGION_SETTING), is("s3-region")); - assertThat(getValue(repositorySettings, Repository.ENDPOINT_SETTING, Repositories.ENDPOINT_SETTING), is("s3-endpoint")); - assertThat(AwsS3Service.CLOUD_S3.PROXY_HOST_SETTING.get(nodeSettings), is("s3-proxy-host")); - assertThat(AwsS3Service.CLOUD_S3.PROXY_PORT_SETTING.get(nodeSettings), is(20000)); - assertThat(AwsS3Service.CLOUD_S3.PROXY_USERNAME_SETTING.get(nodeSettings), is("s3-proxy-username")); - assertThat(AwsS3Service.CLOUD_S3.PROXY_PASSWORD_SETTING.get(nodeSettings), is("s3-proxy-password")); - assertThat(AwsS3Service.CLOUD_S3.SIGNER_SETTING.get(nodeSettings), is("s3-signer")); - assertThat(getValue(repositorySettings, Repository.SERVER_SIDE_ENCRYPTION_SETTING, Repositories.SERVER_SIDE_ENCRYPTION_SETTING), - is(false)); - assertThat(getValue(repositorySettings, Repository.BUFFER_SIZE_SETTING, Repositories.BUFFER_SIZE_SETTING).getMb(), is(100L)); - assertThat(getValue(repositorySettings, Repository.MAX_RETRIES_SETTING, Repositories.MAX_RETRIES_SETTING), is(3)); - assertThat(getValue(repositorySettings, Repository.CHUNK_SIZE_SETTING, Repositories.CHUNK_SIZE_SETTING).getGb(), is(1L)); - assertThat(getValue(repositorySettings, Repository.COMPRESS_SETTING, Repositories.COMPRESS_SETTING), is(false)); - assertThat(getValue(repositorySettings, Repository.STORAGE_CLASS_SETTING, Repositories.STORAGE_CLASS_SETTING), isEmptyString()); - assertThat(getValue(repositorySettings, Repository.CANNED_ACL_SETTING, Repositories.CANNED_ACL_SETTING), isEmptyString()); - assertThat(getValue(repositorySettings, Repository.BASE_PATH_SETTING, Repositories.BASE_PATH_SETTING), isEmptyString()); - } - - /** - * We test when cloud.aws settings are overloaded by repositories.s3 settings - */ - public void testRepositorySettingsGlobalOverloadedByRepositories() { - Settings nodeSettings = buildSettings(AWS, REPOSITORIES); - RepositorySettings repositorySettings = new RepositorySettings(nodeSettings, Settings.EMPTY); - assertThat(getValue(repositorySettings, Repository.KEY_SETTING, Repositories.KEY_SETTING), is("repositories-key")); - assertThat(getValue(repositorySettings, Repository.SECRET_SETTING, Repositories.SECRET_SETTING), is("repositories-secret")); - assertThat(getValue(repositorySettings, Repository.BUCKET_SETTING, Repositories.BUCKET_SETTING), is("repositories-bucket")); - assertThat(getValue(repositorySettings, Repository.PROTOCOL_SETTING, Repositories.PROTOCOL_SETTING), is(Protocol.HTTPS)); - assertThat(getValue(repositorySettings, Repository.REGION_SETTING, Repositories.REGION_SETTING), is("repositories-region")); - assertThat(getValue(repositorySettings, Repository.ENDPOINT_SETTING, Repositories.ENDPOINT_SETTING), is("repositories-endpoint")); - assertThat(AwsS3Service.CLOUD_S3.PROXY_HOST_SETTING.get(nodeSettings), is("global-proxy-host")); - assertThat(AwsS3Service.CLOUD_S3.PROXY_PORT_SETTING.get(nodeSettings), is(10000)); - assertThat(AwsS3Service.CLOUD_S3.PROXY_USERNAME_SETTING.get(nodeSettings), is("global-proxy-username")); 
- assertThat(AwsS3Service.CLOUD_S3.PROXY_PASSWORD_SETTING.get(nodeSettings), is("global-proxy-password")); - assertThat(AwsS3Service.CLOUD_S3.SIGNER_SETTING.get(nodeSettings), is("global-signer")); - assertThat(getValue(repositorySettings, Repository.SERVER_SIDE_ENCRYPTION_SETTING, Repositories.SERVER_SIDE_ENCRYPTION_SETTING), - is(true)); - assertThat(getValue(repositorySettings, Repository.BUFFER_SIZE_SETTING, Repositories.BUFFER_SIZE_SETTING).getMb(), is(6L)); - assertThat(getValue(repositorySettings, Repository.MAX_RETRIES_SETTING, Repositories.MAX_RETRIES_SETTING), is(4)); - assertThat(getValue(repositorySettings, Repository.CHUNK_SIZE_SETTING, Repositories.CHUNK_SIZE_SETTING).getMb(), is(110L)); - assertThat(getValue(repositorySettings, Repository.COMPRESS_SETTING, Repositories.COMPRESS_SETTING), is(true)); - assertThat(getValue(repositorySettings, Repository.STORAGE_CLASS_SETTING, Repositories.STORAGE_CLASS_SETTING), - is("repositories-class")); - assertThat(getValue(repositorySettings, Repository.CANNED_ACL_SETTING, Repositories.CANNED_ACL_SETTING), is("repositories-acl")); - assertThat(getValue(repositorySettings, Repository.BASE_PATH_SETTING, Repositories.BASE_PATH_SETTING), is("repositories-basepath")); - } - - /** - * We test when cloud.aws.s3 settings are overloaded by repositories.s3 settings - */ - public void testRepositorySettingsS3OverloadedByRepositories() { - Settings nodeSettings = buildSettings(AWS, S3, REPOSITORIES); - RepositorySettings repositorySettings = new RepositorySettings(nodeSettings, Settings.EMPTY); - assertThat(getValue(repositorySettings, Repository.KEY_SETTING, Repositories.KEY_SETTING), is("repositories-key")); - assertThat(getValue(repositorySettings, Repository.SECRET_SETTING, Repositories.SECRET_SETTING), is("repositories-secret")); - assertThat(getValue(repositorySettings, Repository.BUCKET_SETTING, Repositories.BUCKET_SETTING), is("repositories-bucket")); - assertThat(getValue(repositorySettings, Repository.PROTOCOL_SETTING, Repositories.PROTOCOL_SETTING), is(Protocol.HTTPS)); - assertThat(getValue(repositorySettings, Repository.REGION_SETTING, Repositories.REGION_SETTING), is("repositories-region")); - assertThat(getValue(repositorySettings, Repository.ENDPOINT_SETTING, Repositories.ENDPOINT_SETTING), is("repositories-endpoint")); - assertThat(AwsS3Service.CLOUD_S3.PROXY_HOST_SETTING.get(nodeSettings), is("s3-proxy-host")); - assertThat(AwsS3Service.CLOUD_S3.PROXY_PORT_SETTING.get(nodeSettings), is(20000)); - assertThat(AwsS3Service.CLOUD_S3.PROXY_USERNAME_SETTING.get(nodeSettings), is("s3-proxy-username")); - assertThat(AwsS3Service.CLOUD_S3.PROXY_PASSWORD_SETTING.get(nodeSettings), is("s3-proxy-password")); - assertThat(AwsS3Service.CLOUD_S3.SIGNER_SETTING.get(nodeSettings), is("s3-signer")); - assertThat(getValue(repositorySettings, Repository.SERVER_SIDE_ENCRYPTION_SETTING, Repositories.SERVER_SIDE_ENCRYPTION_SETTING), - is(true)); - assertThat(getValue(repositorySettings, Repository.BUFFER_SIZE_SETTING, Repositories.BUFFER_SIZE_SETTING).getMb(), is(6L)); - assertThat(getValue(repositorySettings, Repository.MAX_RETRIES_SETTING, Repositories.MAX_RETRIES_SETTING), is(4)); - assertThat(getValue(repositorySettings, Repository.CHUNK_SIZE_SETTING, Repositories.CHUNK_SIZE_SETTING).getMb(), is(110L)); - assertThat(getValue(repositorySettings, Repository.COMPRESS_SETTING, Repositories.COMPRESS_SETTING), is(true)); - assertThat(getValue(repositorySettings, Repository.STORAGE_CLASS_SETTING, Repositories.STORAGE_CLASS_SETTING), - 
is("repositories-class")); - assertThat(getValue(repositorySettings, Repository.CANNED_ACL_SETTING, Repositories.CANNED_ACL_SETTING), is("repositories-acl")); - assertThat(getValue(repositorySettings, Repository.BASE_PATH_SETTING, Repositories.BASE_PATH_SETTING), is("repositories-basepath")); - } - - /** - * We test when cloud.aws settings are overloaded by single repository settings - */ - public void testRepositorySettingsGlobalOverloadedByRepository() { - Settings nodeSettings = buildSettings(AWS); - RepositorySettings repositorySettings = new RepositorySettings(nodeSettings, REPOSITORY); - assertThat(getValue(repositorySettings, Repository.KEY_SETTING, Repositories.KEY_SETTING), is("repository-key")); - assertThat(getValue(repositorySettings, Repository.SECRET_SETTING, Repositories.SECRET_SETTING), is("repository-secret")); - assertThat(getValue(repositorySettings, Repository.BUCKET_SETTING, Repositories.BUCKET_SETTING), is("repository-bucket")); - assertThat(getValue(repositorySettings, Repository.PROTOCOL_SETTING, Repositories.PROTOCOL_SETTING), is(Protocol.HTTPS)); - assertThat(getValue(repositorySettings, Repository.REGION_SETTING, Repositories.REGION_SETTING), is("repository-region")); - assertThat(getValue(repositorySettings, Repository.ENDPOINT_SETTING, Repositories.ENDPOINT_SETTING), is("repository-endpoint")); - assertThat(AwsS3Service.CLOUD_S3.PROXY_HOST_SETTING.get(nodeSettings), is("global-proxy-host")); - assertThat(AwsS3Service.CLOUD_S3.PROXY_PORT_SETTING.get(nodeSettings), is(10000)); - assertThat(AwsS3Service.CLOUD_S3.PROXY_USERNAME_SETTING.get(nodeSettings), is("global-proxy-username")); - assertThat(AwsS3Service.CLOUD_S3.PROXY_PASSWORD_SETTING.get(nodeSettings), is("global-proxy-password")); - assertThat(AwsS3Service.CLOUD_S3.SIGNER_SETTING.get(nodeSettings), is("global-signer")); - assertThat(getValue(repositorySettings, Repository.SERVER_SIDE_ENCRYPTION_SETTING, Repositories.SERVER_SIDE_ENCRYPTION_SETTING), - is(false)); - assertThat(getValue(repositorySettings, Repository.BUFFER_SIZE_SETTING, Repositories.BUFFER_SIZE_SETTING).getMb(), is(7L)); - assertThat(getValue(repositorySettings, Repository.MAX_RETRIES_SETTING, Repositories.MAX_RETRIES_SETTING), is(5)); - assertThat(getValue(repositorySettings, Repository.CHUNK_SIZE_SETTING, Repositories.CHUNK_SIZE_SETTING).getMb(), is(120L)); - assertThat(getValue(repositorySettings, Repository.COMPRESS_SETTING, Repositories.COMPRESS_SETTING), is(false)); - assertThat(getValue(repositorySettings, Repository.STORAGE_CLASS_SETTING, Repositories.STORAGE_CLASS_SETTING), - is("repository-class")); - assertThat(getValue(repositorySettings, Repository.CANNED_ACL_SETTING, Repositories.CANNED_ACL_SETTING), is("repository-acl")); - assertThat(getValue(repositorySettings, Repository.BASE_PATH_SETTING, Repositories.BASE_PATH_SETTING), is("repository-basepath")); - } - - /** - * We test when cloud.aws.s3 settings are overloaded by single repository settings - */ - public void testRepositorySettingsS3OverloadedByRepository() { - Settings nodeSettings = buildSettings(AWS, S3); - RepositorySettings repositorySettings = new RepositorySettings(nodeSettings, REPOSITORY); - assertThat(getValue(repositorySettings, Repository.KEY_SETTING, Repositories.KEY_SETTING), is("repository-key")); - assertThat(getValue(repositorySettings, Repository.SECRET_SETTING, Repositories.SECRET_SETTING), is("repository-secret")); - assertThat(getValue(repositorySettings, Repository.BUCKET_SETTING, Repositories.BUCKET_SETTING), is("repository-bucket")); - 
assertThat(getValue(repositorySettings, Repository.PROTOCOL_SETTING, Repositories.PROTOCOL_SETTING), is(Protocol.HTTPS)); - assertThat(getValue(repositorySettings, Repository.REGION_SETTING, Repositories.REGION_SETTING), is("repository-region")); - assertThat(getValue(repositorySettings, Repository.ENDPOINT_SETTING, Repositories.ENDPOINT_SETTING), is("repository-endpoint")); - assertThat(AwsS3Service.CLOUD_S3.PROXY_HOST_SETTING.get(nodeSettings), is("s3-proxy-host")); - assertThat(AwsS3Service.CLOUD_S3.PROXY_PORT_SETTING.get(nodeSettings), is(20000)); - assertThat(AwsS3Service.CLOUD_S3.PROXY_USERNAME_SETTING.get(nodeSettings), is("s3-proxy-username")); - assertThat(AwsS3Service.CLOUD_S3.PROXY_PASSWORD_SETTING.get(nodeSettings), is("s3-proxy-password")); - assertThat(AwsS3Service.CLOUD_S3.SIGNER_SETTING.get(nodeSettings), is("s3-signer")); - assertThat(getValue(repositorySettings, Repository.SERVER_SIDE_ENCRYPTION_SETTING, Repositories.SERVER_SIDE_ENCRYPTION_SETTING), - is(false)); - assertThat(getValue(repositorySettings, Repository.BUFFER_SIZE_SETTING, Repositories.BUFFER_SIZE_SETTING).getMb(), is(7L)); - assertThat(getValue(repositorySettings, Repository.MAX_RETRIES_SETTING, Repositories.MAX_RETRIES_SETTING), is(5)); - assertThat(getValue(repositorySettings, Repository.CHUNK_SIZE_SETTING, Repositories.CHUNK_SIZE_SETTING).getMb(), is(120L)); - assertThat(getValue(repositorySettings, Repository.COMPRESS_SETTING, Repositories.COMPRESS_SETTING), is(false)); - assertThat(getValue(repositorySettings, Repository.STORAGE_CLASS_SETTING, Repositories.STORAGE_CLASS_SETTING), - is("repository-class")); - assertThat(getValue(repositorySettings, Repository.CANNED_ACL_SETTING, Repositories.CANNED_ACL_SETTING), is("repository-acl")); - assertThat(getValue(repositorySettings, Repository.BASE_PATH_SETTING, Repositories.BASE_PATH_SETTING), is("repository-basepath")); - } - - /** - * We test when repositories settings are overloaded by single repository settings - */ - public void testRepositorySettingsRepositoriesOverloadedByRepository() { - Settings nodeSettings = buildSettings(AWS, S3, REPOSITORIES); - RepositorySettings repositorySettings = new RepositorySettings(nodeSettings, REPOSITORY); - assertThat(getValue(repositorySettings, Repository.KEY_SETTING, Repositories.KEY_SETTING), is("repository-key")); - assertThat(getValue(repositorySettings, Repository.SECRET_SETTING, Repositories.SECRET_SETTING), is("repository-secret")); - assertThat(getValue(repositorySettings, Repository.BUCKET_SETTING, Repositories.BUCKET_SETTING), is("repository-bucket")); - assertThat(getValue(repositorySettings, Repository.PROTOCOL_SETTING, Repositories.PROTOCOL_SETTING), is(Protocol.HTTPS)); - assertThat(getValue(repositorySettings, Repository.REGION_SETTING, Repositories.REGION_SETTING), is("repository-region")); - assertThat(getValue(repositorySettings, Repository.ENDPOINT_SETTING, Repositories.ENDPOINT_SETTING), is("repository-endpoint")); - assertThat(AwsS3Service.CLOUD_S3.PROXY_HOST_SETTING.get(nodeSettings), is("s3-proxy-host")); - assertThat(AwsS3Service.CLOUD_S3.PROXY_PORT_SETTING.get(nodeSettings), is(20000)); - assertThat(AwsS3Service.CLOUD_S3.PROXY_USERNAME_SETTING.get(nodeSettings), is("s3-proxy-username")); - assertThat(AwsS3Service.CLOUD_S3.PROXY_PASSWORD_SETTING.get(nodeSettings), is("s3-proxy-password")); - assertThat(AwsS3Service.CLOUD_S3.SIGNER_SETTING.get(nodeSettings), is("s3-signer")); - assertThat(getValue(repositorySettings, Repository.SERVER_SIDE_ENCRYPTION_SETTING, 
Repositories.SERVER_SIDE_ENCRYPTION_SETTING), - is(false)); - assertThat(getValue(repositorySettings, Repository.BUFFER_SIZE_SETTING, Repositories.BUFFER_SIZE_SETTING).getMb(), is(7L)); - assertThat(getValue(repositorySettings, Repository.MAX_RETRIES_SETTING, Repositories.MAX_RETRIES_SETTING), is(5)); - assertThat(getValue(repositorySettings, Repository.CHUNK_SIZE_SETTING, Repositories.CHUNK_SIZE_SETTING).getMb(), is(120L)); - assertThat(getValue(repositorySettings, Repository.COMPRESS_SETTING, Repositories.COMPRESS_SETTING), is(false)); - assertThat(getValue(repositorySettings, Repository.STORAGE_CLASS_SETTING, Repositories.STORAGE_CLASS_SETTING), - is("repository-class")); - assertThat(getValue(repositorySettings, Repository.CANNED_ACL_SETTING, Repositories.CANNED_ACL_SETTING), is("repository-acl")); - assertThat(getValue(repositorySettings, Repository.BASE_PATH_SETTING, Repositories.BASE_PATH_SETTING), is("repository-basepath")); - } - - /** - * We test wrong Chunk and Buffer settings - */ - public void testInvalidChunkBufferSizeRepositorySettings() throws IOException { - // chunk < buffer should fail - internalTestInvalidChunkBufferSizeSettings(new ByteSizeValue(10, ByteSizeUnit.MB), new ByteSizeValue(5, ByteSizeUnit.MB), - "chunk_size (5mb) can't be lower than buffer_size (10mb)."); - // chunk > buffer should pass - internalTestInvalidChunkBufferSizeSettings(new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(10, ByteSizeUnit.MB), null); - // chunk = buffer should pass - internalTestInvalidChunkBufferSizeSettings(new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(5, ByteSizeUnit.MB), null); - // buffer < 5mb should fail - internalTestInvalidChunkBufferSizeSettings(new ByteSizeValue(4, ByteSizeUnit.MB), new ByteSizeValue(10, ByteSizeUnit.MB), - "Failed to parse value [4mb] for setting [buffer_size] must be >= 5mb"); - // chunk > 5tb should fail - internalTestInvalidChunkBufferSizeSettings(new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(6, ByteSizeUnit.TB), - "Failed to parse value [6tb] for setting [chunk_size] must be <= 5tb"); - } - - private Settings buildSettings(Settings... 
global) { - Settings.Builder builder = Settings.builder(); - for (Settings settings : global) { - builder.put(settings); - } - return builder.build(); - } - - private void internalTestInvalidChunkBufferSizeSettings(ByteSizeValue buffer, ByteSizeValue chunk, String expectedMessage) - throws IOException { - Settings nodeSettings = buildSettings(AWS, S3, REPOSITORIES); - RepositorySettings s3RepositorySettings = new RepositorySettings(nodeSettings, Settings.builder() - .put(Repository.BUFFER_SIZE_SETTING.getKey(), buffer) - .put(Repository.CHUNK_SIZE_SETTING.getKey(), chunk) - .build()); - - try { - new S3Repository(new RepositoryName("s3", "s3repo"), s3RepositorySettings, null, null); - fail("We should either raise a NPE or a RepositoryException or a IllegalArgumentException"); - } catch (RepositoryException e) { - assertThat(e.getDetailedMessage(), containsString(expectedMessage)); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString(expectedMessage)); - } catch (NullPointerException e) { - // Because we passed to the CTOR a Null AwsS3Service, we get a NPE which is expected - // in the context of this test - if (expectedMessage != null) { - fail("We should have raised a RepositoryException"); - } - } - } -} diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/TestAwsS3Service.java b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/TestAwsS3Service.java index 36608c8b172..d7c706822a8 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/TestAwsS3Service.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/cloud/aws/TestAwsS3Service.java @@ -23,20 +23,21 @@ import com.amazonaws.services.s3.AmazonS3; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugin.repository.s3.S3RepositoryPlugin; import org.elasticsearch.plugins.Plugin; import java.util.IdentityHashMap; public class TestAwsS3Service extends InternalAwsS3Service { - public static class TestPlugin extends Plugin { - public void onModule(S3Module s3Module) { - S3Module.s3ServiceImpl = TestAwsS3Service.class; + public static class TestPlugin extends S3RepositoryPlugin { + @Override + protected AwsS3Service createStorageService(Settings settings) { + return new TestAwsS3Service(settings); } } IdentityHashMap clients = new IdentityHashMap(); - @Inject public TestAwsS3Service(Settings settings) { super(settings); } @@ -44,8 +45,8 @@ public class TestAwsS3Service extends InternalAwsS3Service { @Override public synchronized AmazonS3 client(String endpoint, Protocol protocol, String region, String account, String key, Integer maxRetries, - boolean useThrottleRetries) { - return cachedWrapper(super.client(endpoint, protocol, region, account, key, maxRetries, useThrottleRetries)); + boolean useThrottleRetries, Boolean pathStyleAccess) { + return cachedWrapper(super.client(endpoint, protocol, region, account, key, maxRetries, useThrottleRetries, pathStyleAccess)); } private AmazonS3 cachedWrapper(AmazonS3 client) { diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java index 2e196610c1a..a3671b42ee4 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java +++ 
b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java @@ -49,10 +49,8 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.notNullValue; -/** - */ @ClusterScope(scope = Scope.SUITE, numDataNodes = 2, numClientNodes = 0, transportClientRatio = 0.0) -abstract public class AbstractS3SnapshotRestoreTest extends AbstractAwsTestCase { +public abstract class AbstractS3SnapshotRestoreTest extends AbstractAwsTestCase { @Override public Settings nodeSettings(int nodeOrdinal) { @@ -200,7 +198,7 @@ abstract public class AbstractS3SnapshotRestoreTest extends AbstractAwsTestCase S3Repository.Repositories.REGION_SETTING.get(settings), S3Repository.Repositories.KEY_SETTING.get(settings), S3Repository.Repositories.SECRET_SETTING.get(settings), - null, randomBoolean()); + null, randomBoolean(), null); String bucketName = bucket.get("bucket"); logger.info("--> verify encryption for bucket [{}], prefix [{}]", bucketName, basePath); @@ -476,7 +474,7 @@ abstract public class AbstractS3SnapshotRestoreTest extends AbstractAwsTestCase // as described in README assertThat("Your settings in elasticsearch.yml are incorrect. Check the README file.", bucketName, notNullValue()); AmazonS3 client = internalCluster().getInstance(AwsS3Service.class).client(endpoint, protocol, region, accessKey, secretKey, - null, randomBoolean()); + null, randomBoolean(), null); try { ObjectListing prevListing = null; //From http://docs.amazonwebservices.com/AmazonS3/latest/dev/DeletingMultipleObjectsUsingJava.html @@ -512,7 +510,7 @@ abstract public class AbstractS3SnapshotRestoreTest extends AbstractAwsTestCase multiObjectDeleteRequest.setKeys(keys); client.deleteObjects(multiObjectDeleteRequest); } - } catch (Throwable ex) { + } catch (Exception ex) { logger.warn("Failed to delete S3 repository [{}] in [{}]", ex, bucketName, region); } } diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java new file mode 100644 index 00000000000..d6cca5d70d6 --- /dev/null +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java @@ -0,0 +1,107 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.
+ */ + +package org.elasticsearch.repositories.s3; + +import java.io.IOException; + +import com.amazonaws.Protocol; +import com.amazonaws.services.s3.AbstractAmazonS3; +import com.amazonaws.services.s3.AmazonS3; +import org.elasticsearch.cloud.aws.AwsS3Service; +import org.elasticsearch.cluster.metadata.RepositoryMetaData; +import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.repositories.RepositoryException; +import org.elasticsearch.test.ESTestCase; + +import static org.elasticsearch.repositories.s3.S3Repository.Repositories; +import static org.elasticsearch.repositories.s3.S3Repository.Repository; +import static org.elasticsearch.repositories.s3.S3Repository.getValue; +import static org.hamcrest.Matchers.containsString; + +public class S3RepositoryTests extends ESTestCase { + + private static class DummyS3Client extends AbstractAmazonS3 { + @Override + public boolean doesBucketExist(String bucketName) { + return true; + } + } + + private static class DummyS3Service extends AbstractLifecycleComponent implements AwsS3Service { + public DummyS3Service() { + super(Settings.EMPTY); + } + @Override + protected void doStart() {} + @Override + protected void doStop() {} + @Override + protected void doClose() {} + @Override + public AmazonS3 client(String endpoint, Protocol protocol, String region, String account, String key, Integer maxRetries, + boolean useThrottleRetries, Boolean pathStyleAccess) { + return new DummyS3Client(); + } + } + + public void testSettingsResolution() throws Exception { + Settings localSettings = Settings.builder().put(Repository.KEY_SETTING.getKey(), "key1").build(); + Settings globalSettings = Settings.builder().put(Repositories.KEY_SETTING.getKey(), "key2").build(); + + assertEquals("key1", getValue(localSettings, globalSettings, Repository.KEY_SETTING, Repositories.KEY_SETTING)); + assertEquals("key1", getValue(localSettings, Settings.EMPTY, Repository.KEY_SETTING, Repositories.KEY_SETTING)); + assertEquals("key2", getValue(Settings.EMPTY, globalSettings, Repository.KEY_SETTING, Repositories.KEY_SETTING)); + assertEquals("", getValue(Settings.EMPTY, Settings.EMPTY, Repository.KEY_SETTING, Repositories.KEY_SETTING)); + } + + public void testInvalidChunkBufferSizeSettings() throws IOException { + // chunk < buffer should fail + assertInvalidBuffer(10, 5, RepositoryException.class, "chunk_size (5mb) can't be lower than buffer_size (10mb)."); + // chunk > buffer should pass + assertValidBuffer(5, 10); + // chunk = buffer should pass + assertValidBuffer(5, 5); + // buffer < 5mb should fail + assertInvalidBuffer(4, 10, IllegalArgumentException.class, + "Failed to parse value [4mb] for setting [buffer_size] must be >= 5mb"); + // chunk > 5tb should fail + assertInvalidBuffer(5, 6000000, IllegalArgumentException.class, + "Failed to parse value [5.7tb] for setting [chunk_size] must be <= 5tb"); + } + + private void assertValidBuffer(long bufferMB, long chunkMB) throws IOException { + RepositoryMetaData metadata = new RepositoryMetaData("dummy-repo", "mock", Settings.builder() + .put(Repository.BUFFER_SIZE_SETTING.getKey(), new ByteSizeValue(bufferMB, ByteSizeUnit.MB)) + .put(Repository.CHUNK_SIZE_SETTING.getKey(), new ByteSizeValue(chunkMB, ByteSizeUnit.MB)).build()); + new S3Repository(metadata, Settings.EMPTY, new DummyS3Service()); + } + + private void 
assertInvalidBuffer(int bufferMB, int chunkMB, Class clazz, String msg) throws IOException { + RepositoryMetaData metadata = new RepositoryMetaData("dummy-repo", "mock", Settings.builder() + .put(Repository.BUFFER_SIZE_SETTING.getKey(), new ByteSizeValue(bufferMB, ByteSizeUnit.MB)) + .put(Repository.CHUNK_SIZE_SETTING.getKey(), new ByteSizeValue(chunkMB, ByteSizeUnit.MB)).build()); + + Exception e = expectThrows(clazz, () -> new S3Repository(metadata, Settings.EMPTY, new DummyS3Service())); + assertThat(e.getMessage(), containsString(msg)); + } +} diff --git a/plugins/store-smb/src/test/java/org/elasticsearch/index/store/AbstractAzureFsTestCase.java b/plugins/store-smb/src/test/java/org/elasticsearch/index/store/AbstractAzureFsTestCase.java index 9e29d6f091a..6016b0ca335 100644 --- a/plugins/store-smb/src/test/java/org/elasticsearch/index/store/AbstractAzureFsTestCase.java +++ b/plugins/store-smb/src/test/java/org/elasticsearch/index/store/AbstractAzureFsTestCase.java @@ -28,7 +28,7 @@ import java.util.Collection; import static org.hamcrest.Matchers.is; -abstract public class AbstractAzureFsTestCase extends ESIntegTestCase { +public abstract class AbstractAzureFsTestCase extends ESIntegTestCase { @Override protected Collection> nodePlugins() { return pluginList(SMBStorePlugin.class); diff --git a/plugins/store-smb/src/test/resources/rest-api-spec/test/store_smb/15_index_creation.yaml b/plugins/store-smb/src/test/resources/rest-api-spec/test/store_smb/15_index_creation.yaml index f6a964a5dae..d036176e320 100644 --- a/plugins/store-smb/src/test/resources/rest-api-spec/test/store_smb/15_index_creation.yaml +++ b/plugins/store-smb/src/test/resources/rest-api-spec/test/store_smb/15_index_creation.yaml @@ -6,10 +6,6 @@ index: store.type: smb_mmap_fs - - do: - cluster.health: - wait_for_status: yellow - - do: index: index: smb-test diff --git a/qa/backwards-5.0/build.gradle b/qa/backwards-5.0/build.gradle index fbce12f8126..657a6b7c078 100644 --- a/qa/backwards-5.0/build.gradle +++ b/qa/backwards-5.0/build.gradle @@ -18,6 +18,6 @@ integTest { cluster { numNodes = 2 numBwcNodes = 1 - bwcVersion = "5.0.0-alpha4-SNAPSHOT" // this is the same as the current version until we released the first RC + bwcVersion = "5.0.0-alpha5-SNAPSHOT" // this is the same as the current version until we released the first RC } } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/SeccompTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/SeccompTests.java index a319aaabb70..d028dfd573a 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/SeccompTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/SeccompTests.java @@ -40,7 +40,7 @@ public class SeccompTests extends ESTestCase { if (!JNANatives.LOCAL_SECCOMP_ALL) { try { Seccomp.init(createTempDir()); - } catch (Throwable e) { + } catch (Exception e) { throw new RuntimeException("unable to forcefully apply seccomp to test thread", e); } } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java index e5117fa0aa0..af1f311dd23 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/InstallPluginCommandTests.java @@ -26,7 +26,7 @@ import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.Version; import org.elasticsearch.cli.MockTerminal; import 
org.elasticsearch.cli.Terminal; -import org.elasticsearch.cli.UserError; +import org.elasticsearch.cli.UserException; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.PathUtils; @@ -320,7 +320,7 @@ public class InstallPluginCommandTests extends ESTestCase { public void testUnknownPlugin() throws Exception { Tuple env = createEnv(fs, temp); - UserError e = expectThrows(UserError.class, () -> installPlugin("foo", env.v1())); + UserException e = expectThrows(UserException.class, () -> installPlugin("foo", env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("Unknown plugin foo")); } @@ -350,7 +350,7 @@ public class InstallPluginCommandTests extends ESTestCase { Tuple env = createEnv(fs, temp); Path pluginDir = createPluginDir(temp); String pluginZip = createPlugin("lang-groovy", pluginDir); - UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); + UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("is a system module")); assertInstallCleaned(env.v2()); } @@ -385,7 +385,7 @@ public class InstallPluginCommandTests extends ESTestCase { Path pluginDir = createPluginDir(temp); String pluginZip = createPlugin("fake", pluginDir); installPlugin(pluginZip, env.v1()); - UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); + UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("already exists")); assertInstallCleaned(env.v2()); } @@ -407,7 +407,7 @@ public class InstallPluginCommandTests extends ESTestCase { Path binDir = pluginDir.resolve("bin"); Files.createFile(binDir); String pluginZip = createPlugin("fake", pluginDir); - UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); + UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("not a directory")); assertInstallCleaned(env.v2()); } @@ -419,7 +419,7 @@ public class InstallPluginCommandTests extends ESTestCase { Files.createDirectories(dirInBinDir); Files.createFile(dirInBinDir.resolve("somescript")); String pluginZip = createPlugin("fake", pluginDir); - UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); + UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("Directories not allowed in bin dir for plugin")); assertInstallCleaned(env.v2()); } @@ -490,7 +490,7 @@ public class InstallPluginCommandTests extends ESTestCase { Path configDir = pluginDir.resolve("config"); Files.createFile(configDir); String pluginZip = createPlugin("fake", pluginDir); - UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); + UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("not a directory")); assertInstallCleaned(env.v2()); } @@ -502,7 +502,7 @@ public class InstallPluginCommandTests extends ESTestCase { Files.createDirectories(dirInConfigDir); Files.createFile(dirInConfigDir.resolve("myconfig.yml")); String pluginZip = createPlugin("fake", pluginDir); - UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); + 
UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("Directories not allowed in config dir for plugin")); assertInstallCleaned(env.v2()); } @@ -534,7 +534,7 @@ public class InstallPluginCommandTests extends ESTestCase { Path pluginDir = createPluginDir(temp); Files.createFile(pluginDir.resolve(PluginInfo.ES_PLUGIN_PROPERTIES)); String pluginZip = writeZip(pluginDir, null); - UserError e = expectThrows(UserError.class, () -> installPlugin(pluginZip, env.v1())); + UserException e = expectThrows(UserException.class, () -> installPlugin(pluginZip, env.v1())); assertTrue(e.getMessage(), e.getMessage().contains("`elasticsearch` directory is missing in the plugin zip")); assertInstallCleaned(env.v2()); } @@ -580,16 +580,16 @@ public class InstallPluginCommandTests extends ESTestCase { public void testInstallMisspelledOfficialPlugins() throws Exception { Tuple env = createEnv(fs, temp); - UserError e = expectThrows(UserError.class, () -> installPlugin("xpack", env.v1())); + UserException e = expectThrows(UserException.class, () -> installPlugin("xpack", env.v1())); assertThat(e.getMessage(), containsString("Unknown plugin xpack, did you mean [x-pack]?")); - e = expectThrows(UserError.class, () -> installPlugin("analysis-smartnc", env.v1())); + e = expectThrows(UserException.class, () -> installPlugin("analysis-smartnc", env.v1())); assertThat(e.getMessage(), containsString("Unknown plugin analysis-smartnc, did you mean [analysis-smartcn]?")); - e = expectThrows(UserError.class, () -> installPlugin("repository", env.v1())); + e = expectThrows(UserException.class, () -> installPlugin("repository", env.v1())); assertThat(e.getMessage(), containsString("Unknown plugin repository, did you mean any of [repository-s3, repository-gcs]?")); - e = expectThrows(UserError.class, () -> installPlugin("unknown_plugin", env.v1())); + e = expectThrows(UserException.class, () -> installPlugin("unknown_plugin", env.v1())); assertThat(e.getMessage(), containsString("Unknown plugin unknown_plugin")); } diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java index 3a4639fa839..e2910be64f0 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/plugins/RemovePluginCommandTests.java @@ -27,7 +27,7 @@ import java.util.HashMap; import java.util.Map; import org.apache.lucene.util.LuceneTestCase; -import org.elasticsearch.cli.UserError; +import org.elasticsearch.cli.UserException; import org.elasticsearch.cli.MockTerminal; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; @@ -72,7 +72,7 @@ public class RemovePluginCommandTests extends ESTestCase { } public void testMissing() throws Exception { - UserError e = expectThrows(UserError.class, () -> removePlugin("dne", home)); + UserException e = expectThrows(UserException.class, () -> removePlugin("dne", home)); assertTrue(e.getMessage(), e.getMessage().contains("plugin dne not found")); assertRemoveCleaned(env); } @@ -102,7 +102,7 @@ public class RemovePluginCommandTests extends ESTestCase { public void testBinNotDir() throws Exception { Files.createDirectories(env.pluginsFile().resolve("elasticsearch")); - UserError e = expectThrows(UserError.class, () -> removePlugin("elasticsearch", home)); + UserException e = 
expectThrows(UserException.class, () -> removePlugin("elasticsearch", home)); assertTrue(e.getMessage(), e.getMessage().contains("not a directory")); assertTrue(Files.exists(env.pluginsFile().resolve("elasticsearch"))); // did not remove assertTrue(Files.exists(env.binFile().resolve("elasticsearch"))); diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java index 4199a5d67cd..95550f6f654 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/tribe/TribeUnitTests.java @@ -23,10 +23,12 @@ import org.apache.lucene.util.IOUtils; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.node.DiscoveryNodeService; import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.env.Environment; +import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.node.Node; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESTestCase; @@ -51,13 +53,13 @@ public class TribeUnitTests extends ESTestCase { private static Node tribe1; private static Node tribe2; - private static final String NODE_MODE = InternalTestCluster.configuredNodeMode(); @BeforeClass public static void createTribes() { Settings baseSettings = Settings.builder() - .put("http.enabled", false) - .put(Node.NODE_MODE_SETTING.getKey(), NODE_MODE) + .put(NetworkModule.HTTP_ENABLED.getKey(), false) + .put("transport.type", "local") + .put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "local") .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build(); tribe1 = new TribeClientNode( @@ -65,14 +67,14 @@ public class TribeUnitTests extends ESTestCase { .put(baseSettings) .put("cluster.name", "tribe1") .put("node.name", "tribe1_node") - .put(DiscoveryNodeService.NODE_ID_SEED_SETTING.getKey(), random().nextLong()) + .put(NodeEnvironment.NODE_ID_SEED_SETTING.getKey(), random().nextLong()) .build()).start(); tribe2 = new TribeClientNode( Settings.builder() .put(baseSettings) .put("cluster.name", "tribe2") .put("node.name", "tribe2_node") - .put(DiscoveryNodeService.NODE_ID_SEED_SETTING.getKey(), random().nextLong()) + .put(NodeEnvironment.NODE_ID_SEED_SETTING.getKey(), random().nextLong()) .build()).start(); } @@ -93,26 +95,23 @@ public class TribeUnitTests extends ESTestCase { } private static void assertTribeNodeSuccessfullyCreated(Settings extraSettings) throws Exception { - //tribe node doesn't need the node.mode setting, as it's forced local internally anyways. 
The tribe clients do need it to make sure - //they can find their corresponding tribes using the proper transport - Settings settings = Settings.builder().put("http.enabled", false).put("node.name", "tribe_node") - .put("tribe.t1.node.mode", NODE_MODE).put("tribe.t2.node.mode", NODE_MODE) + //The tribe clients do need it to make sure they can find their corresponding tribes using the proper transport + Settings settings = Settings.builder().put(NetworkModule.HTTP_ENABLED.getKey(), false).put("node.name", "tribe_node") + .put("transport.type", "local").put("discovery.type", "local") + .put("tribe.t1.transport.type", "local").put("tribe.t2.transport.type", "local") + .put("tribe.t1.discovery.type", "local").put("tribe.t2.discovery.type", "local") .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) - .put(Node.NODE_MODE_SETTING.getKey(), NODE_MODE) .put(extraSettings).build(); try (Node node = new Node(settings).start()) { try (Client client = node.client()) { - assertBusy(new Runnable() { - @Override - public void run() { - ClusterState state = client.admin().cluster().prepareState().clear().setNodes(true).get().getState(); - assertThat(state.getClusterName().value(), equalTo("tribe_node_cluster")); - assertThat(state.getNodes().getSize(), equalTo(5)); - for (DiscoveryNode discoveryNode : state.getNodes()) { - assertThat(discoveryNode.getName(), either(equalTo("tribe1_node")).or(equalTo("tribe2_node")) - .or(equalTo("tribe_node")).or(equalTo("tribe_node/t1")).or(equalTo("tribe_node/t2"))); - } + assertBusy(() -> { + ClusterState state = client.admin().cluster().prepareState().clear().setNodes(true).get().getState(); + assertThat(state.getClusterName().value(), equalTo("tribe_node_cluster")); + assertThat(state.getNodes().getSize(), equalTo(5)); + for (DiscoveryNode discoveryNode : state.getNodes()) { + assertThat(discoveryNode.getName(), either(equalTo("tribe1_node")).or(equalTo("tribe2_node")) + .or(equalTo("tribe_node")).or(equalTo("tribe_node/t1")).or(equalTo("tribe_node/t2"))); } }); } diff --git a/qa/evil-tests/src/test/resources/org/elasticsearch/tribe/elasticsearch.yml b/qa/evil-tests/src/test/resources/org/elasticsearch/tribe/elasticsearch.yml index d4fa8d8d130..19b2a7b5dd9 100644 --- a/qa/evil-tests/src/test/resources/org/elasticsearch/tribe/elasticsearch.yml +++ b/qa/evil-tests/src/test/resources/org/elasticsearch/tribe/elasticsearch.yml @@ -1,5 +1,5 @@ cluster.name: tribe_node_cluster tribe.t1.cluster.name: tribe1 tribe.t2.cluster.name: tribe2 -tribe.t1.node_id.seed: 1 -tribe.t2.node_id.seed: 2 +tribe.t1.node.id.seed: 1 +tribe.t2.node.id.seed: 2 diff --git a/qa/smoke-test-client/build.gradle b/qa/smoke-test-client/build.gradle index 0475243467a..260516a5bf6 100644 --- a/qa/smoke-test-client/build.gradle +++ b/qa/smoke-test-client/build.gradle @@ -20,3 +20,7 @@ apply plugin: 'elasticsearch.rest-test' // TODO: this test works, but it isn't really a rest test...should we have another plugin for "non rest test that just needs N clusters?" 
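Aside: in the TribeUnitTests change above, the removed node.mode setting is replaced by explicit transport.type and discovery.type entries, set both on the tribe node itself and, via the tribe.t1/tribe.t2 prefixes, on each inner tribe client. A hedged sketch gathering those keys in one builder (only keys that appear in the diff are used; this is illustrative, not a drop-in for the test):

import org.elasticsearch.common.settings.Settings;

final class TribeSettingsSketch {
    // "local" transport/discovery keep the tribe node and its inner tribe
    // clients in-JVM instead of binding real network ports.
    static Settings tribeNodeSettings() {
        return Settings.builder()
                .put("node.name", "tribe_node")
                .put("transport.type", "local")          // replaces the removed node.mode
                .put("discovery.type", "local")
                .put("tribe.t1.transport.type", "local")
                .put("tribe.t1.discovery.type", "local")
                .put("tribe.t2.transport.type", "local")
                .put("tribe.t2.discovery.type", "local")
                .build();
    }
}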
+ +dependencies { + testCompile project(path: ':client:transport', configuration: 'runtime') // randomly swapped in as a transport +} \ No newline at end of file diff --git a/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java b/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java index 6297ce244f9..5f54d14eca2 100644 --- a/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java +++ b/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java @@ -25,12 +25,14 @@ import org.elasticsearch.client.Client; import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.ESLoggerFactory; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.env.Environment; -import org.elasticsearch.node.Node; -import org.elasticsearch.node.internal.InternalSettingsPreparer; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.transport.MockTcpTransportPlugin; +import org.elasticsearch.transport.client.PreBuiltTransportClient; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; @@ -41,6 +43,8 @@ import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.URL; import java.nio.file.Path; +import java.util.Collection; +import java.util.Collections; import java.util.Locale; import java.util.concurrent.atomic.AtomicInteger; @@ -73,14 +77,18 @@ public abstract class ESSmokeClientTestCase extends LuceneTestCase { protected String index; private static Client startClient(Path tempDir, TransportAddress... transportAddresses) { - Settings clientSettings = Settings.builder() - .put("node.name", "qa_smoke_client_" + counter.getAndIncrement()) - .put("client.transport.ignore_cluster_name", true) - .put(Environment.PATH_HOME_SETTING.getKey(), tempDir) - .put(Node.NODE_MODE_SETTING.getKey(), "network").build(); // we require network here! 
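Aside: the ESSmokeClientTestCase hunk that continues below replaces the deprecated TransportClient.builder() with PreBuiltTransportClient, randomly swapping in MockTcpTransportPlugin for the default transport. A hedged usage sketch of the new construction path (the port and loopback address are illustrative; the constructor and addTransportAddresses calls are the ones used in the diff):

import java.net.InetAddress;
import java.util.Collections;

import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.transport.client.PreBuiltTransportClient;

final class TransportClientSketch {
    static TransportClient connect() {
        Settings settings = Settings.builder()
                .put("client.transport.ignore_cluster_name", true) // as in the test
                .build();
        // An empty plugin list selects the default transport; the test passes
        // Collections.singleton(MockTcpTransportPlugin.class) half the time.
        return new PreBuiltTransportClient(settings, Collections.emptyList())
                .addTransportAddresses(
                        new InetSocketTransportAddress(InetAddress.getLoopbackAddress(), 9300));
    }
}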
- - TransportClient.Builder transportClientBuilder = TransportClient.builder().settings(clientSettings); - TransportClient client = transportClientBuilder.build().addTransportAddresses(transportAddresses); + Settings.Builder builder = Settings.builder() + .put("node.name", "qa_smoke_client_" + counter.getAndIncrement()) + .put("client.transport.ignore_cluster_name", true) + .put(Environment.PATH_HOME_SETTING.getKey(), tempDir); + final Collection<Class<? extends Plugin>> plugins; + if (random().nextBoolean()) { + builder.put(NetworkModule.TRANSPORT_TYPE_KEY, MockTcpTransportPlugin.MOCK_TCP_TRANSPORT_NAME); + plugins = Collections.singleton(MockTcpTransportPlugin.class); + } else { + plugins = Collections.emptyList(); + } + TransportClient client = new PreBuiltTransportClient(builder.build(), plugins).addTransportAddresses(transportAddresses); logger.info("--> Elasticsearch Java TransportClient started"); diff --git a/core/src/test/java/org/elasticsearch/common/settings/bar/BarTestClass.java b/qa/smoke-test-http/build.gradle similarity index 83% rename from core/src/test/java/org/elasticsearch/common/settings/bar/BarTestClass.java rename to qa/smoke-test-http/build.gradle index 8c7b0c1f255..7fa3205537a 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/bar/BarTestClass.java +++ b/qa/smoke-test-http/build.gradle @@ -17,8 +17,8 @@ * under the License. */ -package org.elasticsearch.common.settings.bar; +apply plugin: 'elasticsearch.rest-test' -//used in SettingsTest -public class BarTestClass { -} +dependencies { + testCompile project(path: ':modules:transport-netty3', configuration: 'runtime') // for http +} \ No newline at end of file diff --git a/core/src/test/java/org/elasticsearch/transport/ContextAndHeaderTransportIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ContextAndHeaderTransportIT.java similarity index 90% rename from core/src/test/java/org/elasticsearch/transport/ContextAndHeaderTransportIT.java rename to qa/smoke-test-http/src/test/java/org/elasticsearch/http/ContextAndHeaderTransportIT.java index e9ec3787afc..caaa328b1ab 100644 --- a/core/src/test/java/org/elasticsearch/transport/ContextAndHeaderTransportIT.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ContextAndHeaderTransportIT.java @@ -17,7 +17,7 @@ * under the License.
*/ -package org.elasticsearch.transport; +package org.elasticsearch.http; import org.apache.http.message.BasicHeader; import org.elasticsearch.action.ActionListener; @@ -46,8 +46,6 @@ import org.elasticsearch.index.query.TermsQueryBuilder; import org.elasticsearch.indices.TermsLookup; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.rest.RestController; -import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.threadpool.ThreadPool; import org.junit.After; @@ -74,9 +72,9 @@ import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; @ClusterScope(scope = SUITE) -public class ContextAndHeaderTransportIT extends ESIntegTestCase { +public class ContextAndHeaderTransportIT extends HttpSmokeTestCase { private static final List<RequestAndHeaders> requests = new CopyOnWriteArrayList<>(); - private String randomHeaderKey = randomAsciiOfLength(10); + private static final String CUSTOM_HEADER = "SomeCustomHeader"; private String randomHeaderValue = randomAsciiOfLength(20); private String queryIndex = "query-" + randomAsciiOfLength(10).toLowerCase(Locale.ROOT); private String lookupIndex = "lookup-" + randomAsciiOfLength(10).toLowerCase(Locale.ROOT); @@ -85,14 +83,21 @@ public class ContextAndHeaderTransportIT extends ESIntegTestCase { protected Settings nodeSettings(int nodeOrdinal) { return Settings.builder() .put(super.nodeSettings(nodeOrdinal)) - .put("script.stored", "true") .put(NetworkModule.HTTP_ENABLED.getKey(), true) .build(); } + @Override + protected boolean ignoreExternalCluster() { + return true; + } + @Override protected Collection<Class<? extends Plugin>> nodePlugins() { - return pluginList(ActionLoggingPlugin.class); + ArrayList<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins()); + plugins.add(ActionLoggingPlugin.class); + plugins.add(CustomHeadersPlugin.class); + return plugins; } @Before @@ -214,21 +219,18 @@ public class ContextAndHeaderTransportIT extends ESIntegTestCase { } public void testThatRelevantHttpHeadersBecomeRequestHeaders() throws Exception { - String relevantHeaderName = "relevant_" + randomHeaderKey; - for (RestController restController : internalCluster().getInstances(RestController.class)) { - restController.registerRelevantHeaders(relevantHeaderName); - } + final String IRRELEVANT_HEADER = "SomeIrrelevantHeader"; try (Response response = getRestClient().performRequest( - "GET", "/" + queryIndex + "/_search", Collections.emptyMap(), null, - new BasicHeader(randomHeaderKey, randomHeaderValue), new BasicHeader(relevantHeaderName, randomHeaderValue))) { + "GET", "/" + queryIndex + "/_search", + new BasicHeader(CUSTOM_HEADER, randomHeaderValue), new BasicHeader(IRRELEVANT_HEADER, randomHeaderValue))) { assertThat(response.getStatusLine().getStatusCode(), equalTo(200)); List<RequestAndHeaders> searchRequests = getRequests(SearchRequest.class); assertThat(searchRequests, hasSize(greaterThan(0))); for (RequestAndHeaders requestAndHeaders : searchRequests) { - assertThat(requestAndHeaders.headers.containsKey(relevantHeaderName), is(true)); + assertThat(requestAndHeaders.headers.containsKey(CUSTOM_HEADER), is(true)); // was not specified, thus is not included - assertThat(requestAndHeaders.headers.containsKey(randomHeaderKey), is(false)); + assertThat(requestAndHeaders.headers.containsKey(IRRELEVANT_HEADER), is(false)); } } } @@ -268,27 +270,27 @@ public class ContextAndHeaderTransportIT extends ESIntegTestCase { } private void assertRequestContainsHeader(ActionRequest
request, Map<String, Object> context) { - String msg = String.format(Locale.ROOT, "Expected header %s to be in request %s", randomHeaderKey, request.getClass().getName()); + String msg = String.format(Locale.ROOT, "Expected header %s to be in request %s", CUSTOM_HEADER, request.getClass().getName()); if (request instanceof IndexRequest) { IndexRequest indexRequest = (IndexRequest) request; - msg = String.format(Locale.ROOT, "Expected header %s to be in index request %s/%s/%s", randomHeaderKey, + msg = String.format(Locale.ROOT, "Expected header %s to be in index request %s/%s/%s", CUSTOM_HEADER, indexRequest.index(), indexRequest.type(), indexRequest.id()); } - assertThat(msg, context.containsKey(randomHeaderKey), is(true)); - assertThat(context.get(randomHeaderKey).toString(), is(randomHeaderValue)); + assertThat(msg, context.containsKey(CUSTOM_HEADER), is(true)); + assertThat(context.get(CUSTOM_HEADER).toString(), is(randomHeaderValue)); } /** * a transport client that adds our random header */ private Client transportClient() { - return internalCluster().transportClient().filterWithHeader(Collections.singletonMap(randomHeaderKey, randomHeaderValue)); + return internalCluster().transportClient().filterWithHeader(Collections.singletonMap(CUSTOM_HEADER, randomHeaderValue)); } public static class ActionLoggingPlugin extends Plugin implements ActionPlugin { @Override - public Collection<Module> nodeModules() { + public Collection<Module> createGuiceModules() { return Collections.singletonList(new ActionLoggingModule()); } @@ -342,4 +344,10 @@ public class ContextAndHeaderTransportIT extends ESIntegTestCase { this.request = request; } } + + public static class CustomHeadersPlugin extends Plugin implements ActionPlugin { + public Collection<String> getRestHeaders() { + return Collections.singleton(CUSTOM_HEADER); + } + } } diff --git a/core/src/test/java/org/elasticsearch/rest/CorsNotSetIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/CorsNotSetIT.java similarity index 73% rename from core/src/test/java/org/elasticsearch/rest/CorsNotSetIT.java rename to qa/smoke-test-http/src/test/java/org/elasticsearch/http/CorsNotSetIT.java index 5f272d3e7e2..7cc84354f6f 100644 --- a/core/src/test/java/org/elasticsearch/rest/CorsNotSetIT.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/CorsNotSetIT.java @@ -17,36 +17,21 @@ * under the License.
*/ -package org.elasticsearch.rest; +package org.elasticsearch.http; import org.apache.http.message.BasicHeader; import org.elasticsearch.client.Response; -import org.elasticsearch.common.network.NetworkModule; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.ESIntegTestCase.ClusterScope; - -import java.util.Collections; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; -/** - * - */ -@ClusterScope(scope = ESIntegTestCase.Scope.SUITE, supportsDedicatedMasters = false, numDataNodes = 1) -public class CorsNotSetIT extends ESIntegTestCase { +public class CorsNotSetIT extends HttpSmokeTestCase { - @Override - protected Settings nodeSettings(int nodeOrdinal) { - return Settings.builder() - .put(NetworkModule.HTTP_ENABLED.getKey(), true) - .put(super.nodeSettings(nodeOrdinal)).build(); - } public void testCorsSettingDefaultBehaviourDoesNotReturnAnything() throws Exception { String corsValue = "http://localhost:9200"; - try (Response response = getRestClient().performRequest("GET", "/", Collections.emptyMap(), null, + try (Response response = getRestClient().performRequest("GET", "/", new BasicHeader("User-Agent", "Mozilla Bar"), new BasicHeader("Origin", corsValue))) { assertThat(response.getStatusLine().getStatusCode(), is(200)); assertThat(response.getHeader("Access-Control-Allow-Origin"), nullValue()); @@ -55,7 +40,7 @@ public class CorsNotSetIT extends ESIntegTestCase { } public void testThatOmittingCorsHeaderDoesNotReturnAnything() throws Exception { - try (Response response = getRestClient().performRequest("GET", "/", Collections.emptyMap(), null)) { + try (Response response = getRestClient().performRequest("GET", "/")) { assertThat(response.getStatusLine().getStatusCode(), is(200)); assertThat(response.getHeader("Access-Control-Allow-Origin"), nullValue()); assertThat(response.getHeader("Access-Control-Allow-Credentials"), nullValue()); diff --git a/core/src/test/java/org/elasticsearch/rest/CorsRegexIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/CorsRegexIT.java similarity index 83% rename from core/src/test/java/org/elasticsearch/rest/CorsRegexIT.java rename to qa/smoke-test-http/src/test/java/org/elasticsearch/http/CorsRegexIT.java index e6c85553231..a7d3f4156df 100644 --- a/core/src/test/java/org/elasticsearch/rest/CorsRegexIT.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/CorsRegexIT.java @@ -16,21 +16,16 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.rest; +package org.elasticsearch.http; import org.apache.http.message.BasicHeader; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; -import org.elasticsearch.common.logging.ESLogger; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; -import org.jboss.netty.handler.codec.http.HttpHeaders; - -import java.util.Collections; import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_CREDENTIALS; import static org.elasticsearch.http.HttpTransportSettings.SETTING_CORS_ALLOW_METHODS; @@ -43,9 +38,7 @@ import static org.hamcrest.Matchers.nullValue; * Test CORS where the allow origin value is a regular expression. */ @ClusterScope(scope = Scope.SUITE, supportsDedicatedMasters = false, numDataNodes = 1) -public class CorsRegexIT extends ESIntegTestCase { - - protected static final ESLogger logger = Loggers.getLogger(CorsRegexIT.class); +public class CorsRegexIT extends HttpSmokeTestCase { @Override protected Settings nodeSettings(int nodeOrdinal) { @@ -59,14 +52,19 @@ public class CorsRegexIT extends ESIntegTestCase { .build(); } + @Override + protected boolean ignoreExternalCluster() { + return true; + } + public void testThatRegularExpressionWorksOnMatch() throws Exception { String corsValue = "http://localhost:9200"; - try (Response response = getRestClient().performRequest("GET", "/", Collections.emptyMap(), null, + try (Response response = getRestClient().performRequest("GET", "/", new BasicHeader("User-Agent", "Mozilla Bar"), new BasicHeader("Origin", corsValue))) { assertResponseWithOriginheader(response, corsValue); } corsValue = "https://localhost:9200"; - try (Response response = getRestClient().performRequest("GET", "/", Collections.emptyMap(), null, + try (Response response = getRestClient().performRequest("GET", "/", new BasicHeader("User-Agent", "Mozilla Bar"), new BasicHeader("Origin", corsValue));) { assertResponseWithOriginheader(response, corsValue); assertThat(response.getHeader("Access-Control-Allow-Credentials"), is("true")); @@ -75,7 +73,7 @@ public class CorsRegexIT extends ESIntegTestCase { public void testThatRegularExpressionReturnsForbiddenOnNonMatch() throws Exception { try { - getRestClient().performRequest("GET", "/", Collections.emptyMap(), null, new BasicHeader("User-Agent", "Mozilla Bar"), + getRestClient().performRequest("GET", "/", new BasicHeader("User-Agent", "Mozilla Bar"), new BasicHeader("Origin", "http://evil-host:9200")); fail("request should have failed"); } catch(ResponseException e) { @@ -87,15 +85,14 @@ public class CorsRegexIT extends ESIntegTestCase { } public void testThatSendingNoOriginHeaderReturnsNoAccessControlHeader() throws Exception { - try (Response response = getRestClient().performRequest("GET", "/", Collections.emptyMap(), null, - new BasicHeader("User-Agent", "Mozilla Bar"))) { + try (Response response = getRestClient().performRequest("GET", "/", new BasicHeader("User-Agent", "Mozilla Bar"))) { assertThat(response.getStatusLine().getStatusCode(), is(200)); assertThat(response.getHeader("Access-Control-Allow-Origin"), nullValue()); } } public void testThatRegularExpressionIsNotAppliedWithoutCorrectBrowserOnMatch() throws Exception { - try (Response response = 
getRestClient().performRequest("GET", "/", Collections.emptyMap(), null)) { + try (Response response = getRestClient().performRequest("GET", "/")) { assertThat(response.getStatusLine().getStatusCode(), is(200)); assertThat(response.getHeader("Access-Control-Allow-Origin"), nullValue()); } @@ -103,9 +100,9 @@ public class CorsRegexIT extends ESIntegTestCase { public void testThatPreFlightRequestWorksOnMatch() throws Exception { String corsValue = "http://localhost:9200"; - try (Response response = getRestClient().performRequest("OPTIONS", "/", Collections.emptyMap(), null, + try (Response response = getRestClient().performRequest("OPTIONS", "/", new BasicHeader("User-Agent", "Mozilla Bar"), new BasicHeader("Origin", corsValue), - new BasicHeader(HttpHeaders.Names.ACCESS_CONTROL_REQUEST_METHOD, "GET"));) { + new BasicHeader("Access-Control-Request-Method", "GET"));) { assertResponseWithOriginheader(response, corsValue); assertNotNull(response.getHeader("Access-Control-Allow-Methods")); } @@ -113,9 +110,9 @@ public class CorsRegexIT extends ESIntegTestCase { public void testThatPreFlightRequestReturnsNullOnNonMatch() throws Exception { try { - getRestClient().performRequest("OPTIONS", "/", Collections.emptyMap(), null, new BasicHeader("User-Agent", "Mozilla Bar"), + getRestClient().performRequest("OPTIONS", "/", new BasicHeader("User-Agent", "Mozilla Bar"), new BasicHeader("Origin", "http://evil-host:9200"), - new BasicHeader(HttpHeaders.Names.ACCESS_CONTROL_REQUEST_METHOD, "GET")); + new BasicHeader("Access-Control-Request-Method", "GET")); fail("request should have failed"); } catch(ResponseException e) { Response response = e.getResponse(); diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DeprecationHttpIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DeprecationHttpIT.java new file mode 100644 index 00000000000..f3b5d214fa4 --- /dev/null +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DeprecationHttpIT.java @@ -0,0 +1,214 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.http; + +import org.apache.http.Header; +import org.apache.http.HttpEntity; +import org.apache.http.entity.StringEntity; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.logging.LoggerMessageFormat; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.plugins.Plugin; + +import org.hamcrest.Matcher; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static org.elasticsearch.rest.RestStatus.OK; +import static org.elasticsearch.http.TestDeprecationHeaderRestAction.TEST_DEPRECATED_SETTING_TRUE1; +import static org.elasticsearch.http.TestDeprecationHeaderRestAction.TEST_DEPRECATED_SETTING_TRUE2; +import static org.elasticsearch.http.TestDeprecationHeaderRestAction.TEST_NOT_DEPRECATED_SETTING; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasSize; + +/** + * Tests that {@code DeprecationLogger} uses the {@code ThreadContext} to add response headers. + */ +public class DeprecationHttpIT extends HttpSmokeTestCase { + + @Override + protected Settings nodeSettings(int nodeOrdinal) { + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal)) + .put("force.http.enabled", true) + // change values of deprecated settings so that accessing them is logged + .put(TEST_DEPRECATED_SETTING_TRUE1.getKey(), ! TEST_DEPRECATED_SETTING_TRUE1.getDefault(Settings.EMPTY)) + .put(TEST_DEPRECATED_SETTING_TRUE2.getKey(), ! TEST_DEPRECATED_SETTING_TRUE2.getDefault(Settings.EMPTY)) + // non-deprecated setting to ensure not everything is logged + .put(TEST_NOT_DEPRECATED_SETTING.getKey(), ! TEST_NOT_DEPRECATED_SETTING.getDefault(Settings.EMPTY)) + .build(); + } + + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + ArrayList<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins()); + plugins.add(TestDeprecationPlugin.class); + return plugins; + } + + /** + * Attempts to do a scatter/gather request that expects unique responses per sub-request.
+ */ + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/19222") + public void testUniqueDeprecationResponsesMergedTogether() throws IOException { + final String[] indices = new String[randomIntBetween(2, 5)]; + + // add at least one document for each index + for (int i = 0; i < indices.length; ++i) { + indices[i] = "test" + i; + + // create indices with a single shard to reduce noise; the query only deprecates uniquely by index anyway + assertTrue(prepareCreate(indices[i]).setSettings(Settings.builder().put("number_of_shards", 1)).get().isAcknowledged()); + + int randomDocCount = randomIntBetween(1, 2); + + for (int j = 0; j < randomDocCount; ++j) { + index(indices[i], "type", Integer.toString(j), "{\"field\":" + j + "}"); + } + } + + refresh(indices); + + final String commaSeparatedIndices = Stream.of(indices).collect(Collectors.joining(",")); + + final String body = + "{\"query\":{\"bool\":{\"filter\":[{\"" + TestDeprecatedQueryBuilder.NAME + "\":{}}]}}}"; + + // trigger all index deprecations + try (Response response = getRestClient().performRequest("GET", + "/" + commaSeparatedIndices + "/_search", + Collections.emptyMap(), + new StringEntity(body, RestClient.JSON_CONTENT_TYPE))) { + assertThat(response.getStatusLine().getStatusCode(), equalTo(OK.getStatus())); + + final List<String> deprecatedWarnings = getWarningHeaders(response.getHeaders()); + final List<Matcher<String>> headerMatchers = new ArrayList<>(indices.length); + + for (String index : indices) { + headerMatchers.add(containsString(LoggerMessageFormat.format("[{}] index", (Object)index))); + } + + assertThat(deprecatedWarnings, hasSize(headerMatchers.size())); + for (Matcher<String> headerMatcher : headerMatchers) { + assertThat(deprecatedWarnings, hasItem(headerMatcher)); + } + } + } + + public void testDeprecationWarningsAppearInHeaders() throws IOException { + doTestDeprecationWarningsAppearInHeaders(); + } + + public void testDeprecationHeadersDoNotGetStuck() throws IOException { + doTestDeprecationWarningsAppearInHeaders(); + doTestDeprecationWarningsAppearInHeaders(); + if (rarely()) { + doTestDeprecationWarningsAppearInHeaders(); + } + } + + /** + * Run a request that receives a predictably randomized number of deprecation warnings. + * <p>
+ * Re-running this back-to-back helps to ensure that warnings are not being maintained across requests. + */ + private void doTestDeprecationWarningsAppearInHeaders() throws IOException { + final boolean useDeprecatedField = randomBoolean(); + final boolean useNonDeprecatedSetting = randomBoolean(); + + // deprecated settings should also trigger a deprecation warning + final List<Setting<Boolean>> settings = new ArrayList<>(3); + settings.add(TEST_DEPRECATED_SETTING_TRUE1); + + if (randomBoolean()) { + settings.add(TEST_DEPRECATED_SETTING_TRUE2); + } + + if (useNonDeprecatedSetting) { + settings.add(TEST_NOT_DEPRECATED_SETTING); + } + + Collections.shuffle(settings, random()); + + // trigger all deprecations + try (Response response = getRestClient().performRequest("GET", + "/_test_cluster/deprecated_settings", + Collections.emptyMap(), + buildSettingsRequest(settings, useDeprecatedField))) { + assertThat(response.getStatusLine().getStatusCode(), equalTo(OK.getStatus())); + + final List<String> deprecatedWarnings = getWarningHeaders(response.getHeaders()); + final List<Matcher<String>> headerMatchers = new ArrayList<>(4); + + headerMatchers.add(equalTo(TestDeprecationHeaderRestAction.DEPRECATED_ENDPOINT)); + if (useDeprecatedField) { + headerMatchers.add(equalTo(TestDeprecationHeaderRestAction.DEPRECATED_USAGE)); + } + for (Setting<Boolean> setting : settings) { + if (setting.isDeprecated()) { + headerMatchers.add(containsString(LoggerMessageFormat.format("[{}] setting was deprecated", (Object)setting.getKey()))); + } + } + + assertThat(deprecatedWarnings, hasSize(headerMatchers.size())); + for (Matcher<String> headerMatcher : headerMatchers) { + assertThat(deprecatedWarnings, hasItem(headerMatcher)); + } + } + } + + private List<String> getWarningHeaders(Header[] headers) { + List<String> warnings = new ArrayList<>(); + + for (Header header : headers) { + if (header.getName().equals("Warning")) { + warnings.add(header.getValue()); + } + } + + return warnings; + } + + private HttpEntity buildSettingsRequest(List<Setting<Boolean>> settings, boolean useDeprecatedField) throws IOException { + XContentBuilder builder = JsonXContent.contentBuilder(); + + builder.startObject().startArray(useDeprecatedField ? "deprecated_settings" : "settings"); + + for (Setting<Boolean> setting : settings) { + builder.value(setting.getKey()); + } + + builder.endArray().endObject(); + + return new StringEntity(builder.string(), RestClient.JSON_CONTENT_TYPE); + } + +} diff --git a/core/src/test/java/org/elasticsearch/options/detailederrors/DetailedErrorsDisabledIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DetailedErrorsDisabledIT.java similarity index 91% rename from core/src/test/java/org/elasticsearch/options/detailederrors/DetailedErrorsDisabledIT.java rename to qa/smoke-test-http/src/test/java/org/elasticsearch/http/DetailedErrorsDisabledIT.java index 21b8706abfb..feca7cd1d5f 100644 --- a/core/src/test/java/org/elasticsearch/options/detailederrors/DetailedErrorsDisabledIT.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DetailedErrorsDisabledIT.java @@ -17,7 +17,7 @@ * under the License.
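For reference, a minimal sketch of how a client sees these warnings over HTTP, assuming the /_test_cluster/deprecated_settings endpoint registered by TestDeprecationHeaderRestAction later in this change; the request pattern, header name, and constants mirror the test above, and the snippet would live inside a test method of this class:

// Sketch only: send one deprecated setting name and log each "Warning" response header.
HttpEntity body = new StringEntity("{\"deprecated_settings\":[\"test.setting.deprecated.true1\"]}",
        RestClient.JSON_CONTENT_TYPE);
try (Response response = getRestClient().performRequest("GET", "/_test_cluster/deprecated_settings",
        Collections.emptyMap(), body)) {
    for (Header header : response.getHeaders()) {
        if ("Warning".equals(header.getName())) {
            // e.g. "[deprecated_settings] usage is deprecated. use [settings] instead"
            logger.info("--> warning: {}", header.getValue());
        }
    }
}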
*/ -package org.elasticsearch.options.detailederrors; +package org.elasticsearch.http; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; @@ -36,7 +36,7 @@ import static org.hamcrest.Matchers.is; * Tests that when disabling detailed errors, a request with the error_trace parameter returns a HTTP 400 */ @ClusterScope(scope = Scope.TEST, supportsDedicatedMasters = false, numDataNodes = 1) -public class DetailedErrorsDisabledIT extends ESIntegTestCase { +public class DetailedErrorsDisabledIT extends HttpSmokeTestCase { // Build our cluster settings @Override protected Settings nodeSettings(int nodeOrdinal) { @@ -47,9 +47,14 @@ public class DetailedErrorsDisabledIT extends ESIntegTestCase { .build(); } + @Override + protected boolean ignoreExternalCluster() { + return true; + } + public void testThatErrorTraceParamReturns400() throws Exception { try { - getRestClient().performRequest("DELETE", "/", Collections.singletonMap("error_trace", "true"), null); + getRestClient().performRequest("DELETE", "/", Collections.singletonMap("error_trace", "true")); fail("request should have failed"); } catch(ResponseException e) { Response response = e.getResponse(); diff --git a/core/src/test/java/org/elasticsearch/options/detailederrors/DetailedErrorsEnabledIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DetailedErrorsEnabledIT.java similarity index 80% rename from core/src/test/java/org/elasticsearch/options/detailederrors/DetailedErrorsEnabledIT.java rename to qa/smoke-test-http/src/test/java/org/elasticsearch/http/DetailedErrorsEnabledIT.java index 269dc72b253..daabb1bc70d 100644 --- a/core/src/test/java/org/elasticsearch/options/detailederrors/DetailedErrorsEnabledIT.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/DetailedErrorsEnabledIT.java @@ -17,7 +17,7 @@ * under the License. 
*/ -package org.elasticsearch.options.detailederrors; +package org.elasticsearch.http; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; @@ -35,19 +35,11 @@ import static org.hamcrest.Matchers.not; /** * Tests that by default the error_trace parameter can be used to show stacktraces */ -@ClusterScope(scope = Scope.TEST, supportsDedicatedMasters = false, numDataNodes = 1) -public class DetailedErrorsEnabledIT extends ESIntegTestCase { - @Override - protected Settings nodeSettings(int nodeOrdinal) { - return Settings.builder() - .put(super.nodeSettings(nodeOrdinal)) - .put(NetworkModule.HTTP_ENABLED.getKey(), true) - .build(); - } +public class DetailedErrorsEnabledIT extends HttpSmokeTestCase { public void testThatErrorTraceWorksByDefault() throws Exception { try { - getRestClient().performRequest("DELETE", "/", Collections.singletonMap("error_trace", "true"), null); + getRestClient().performRequest("DELETE", "/", Collections.singletonMap("error_trace", "true")); fail("request should have failed"); } catch(ResponseException e) { Response response = e.getResponse(); @@ -57,7 +49,7 @@ public class DetailedErrorsEnabledIT extends ESIntegTestCase { } try { - getRestClient().performRequest("DELETE", "/", Collections.emptyMap(), null); + getRestClient().performRequest("DELETE", "/"); fail("request should have failed"); } catch(ResponseException e) { Response response = e.getResponse(); diff --git a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpCompressionIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/HttpCompressionIT.java similarity index 72% rename from core/src/test/java/org/elasticsearch/http/netty/NettyHttpCompressionIT.java rename to qa/smoke-test-http/src/test/java/org/elasticsearch/http/HttpCompressionIT.java index 4e69624376d..f08bb2b4a9e 100644 --- a/core/src/test/java/org/elasticsearch/http/netty/NettyHttpCompressionIT.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/HttpCompressionIT.java @@ -16,29 +16,24 @@ * specific language governing permissions and limitations * under the License. 
*/ -package org.elasticsearch.http.netty; +package org.elasticsearch.http; import org.apache.http.Header; import org.apache.http.HttpException; import org.apache.http.HttpHeaders; import org.apache.http.HttpResponseInterceptor; import org.apache.http.entity.StringEntity; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.HttpClients; +import org.apache.http.impl.client.HttpClientBuilder; import org.apache.http.message.BasicHeader; import org.apache.http.protocol.HttpContext; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; -import org.elasticsearch.common.network.NetworkModule; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.http.HttpTransportSettings; import org.elasticsearch.test.ESIntegTestCase; import java.io.IOException; import java.util.Collections; -@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE, numDataNodes = 1, numClientNodes = 1) -public class NettyHttpCompressionIT extends ESIntegTestCase { +public class HttpCompressionIT extends ESIntegTestCase { private static final String GZIP_ENCODING = "gzip"; private static final StringEntity SAMPLE_DOCUMENT = new StringEntity("{\n" + @@ -49,21 +44,16 @@ public class NettyHttpCompressionIT extends ESIntegTestCase { "}", RestClient.JSON_CONTENT_TYPE); @Override - protected Settings nodeSettings(int nodeOrdinal) { - return Settings.builder() - .put(super.nodeSettings(nodeOrdinal)) - .put(NetworkModule.HTTP_ENABLED.getKey(), true) - .put(HttpTransportSettings.SETTING_HTTP_COMPRESSION.getKey(), true) - .build(); + protected boolean ignoreExternalCluster() { + return false; } public void testCompressesResponseIfRequested() throws Exception { ensureGreen(); // we need to intercept early, otherwise internal logic in HttpClient will just remove the header and we cannot verify it ContentEncodingHeaderExtractor headerExtractor = new ContentEncodingHeaderExtractor(); - try (RestClient client = createRestClient(HttpClients.custom().addInterceptorFirst(headerExtractor).build())) { - try (Response response = client.performRequest("GET", "/", Collections.emptyMap(), null, - new BasicHeader(HttpHeaders.ACCEPT_ENCODING, GZIP_ENCODING))) { + try (RestClient client = createRestClient(new ContentEncodingHeaderExtractorConfigCallback(headerExtractor))) { + try (Response response = client.performRequest("GET", "/", new BasicHeader(HttpHeaders.ACCEPT_ENCODING, GZIP_ENCODING))) { assertEquals(200, response.getStatusLine().getStatusCode()); assertTrue(headerExtractor.hasContentEncodingHeader()); assertEquals(GZIP_ENCODING, headerExtractor.getContentEncodingHeader().getValue()); @@ -74,9 +64,8 @@ public class NettyHttpCompressionIT extends ESIntegTestCase { public void testUncompressedResponseByDefault() throws Exception { ensureGreen(); ContentEncodingHeaderExtractor headerExtractor = new ContentEncodingHeaderExtractor(); - CloseableHttpClient httpClient = HttpClients.custom().disableContentCompression().addInterceptorFirst(headerExtractor).build(); - try (RestClient client = createRestClient(httpClient)) { - try (Response response = client.performRequest("GET", "/", Collections.emptyMap(), null)) { + try (RestClient client = createRestClient(new NoContentCompressionConfigCallback(headerExtractor))) { + try (Response response = client.performRequest("GET", "/")) { assertEquals(200, response.getStatusLine().getStatusCode()); assertFalse(headerExtractor.hasContentEncodingHeader()); } @@ -87,8 +76,7 @@ public class NettyHttpCompressionIT 
extends ESIntegTestCase { ensureGreen(); ContentEncodingHeaderExtractor headerExtractor = new ContentEncodingHeaderExtractor(); // this disable content compression in both directions (request and response) - CloseableHttpClient httpClient = HttpClients.custom().disableContentCompression().addInterceptorFirst(headerExtractor).build(); - try (RestClient client = createRestClient(httpClient)) { + try (RestClient client = createRestClient(new NoContentCompressionConfigCallback(headerExtractor))) { try (Response response = client.performRequest("POST", "/company/employees/1", Collections.emptyMap(), SAMPLE_DOCUMENT)) { assertEquals(201, response.getStatusLine().getStatusCode()); @@ -101,7 +89,7 @@ public class NettyHttpCompressionIT extends ESIntegTestCase { ensureGreen(); ContentEncodingHeaderExtractor headerExtractor = new ContentEncodingHeaderExtractor(); // we don't call #disableContentCompression() hence the client will send the content compressed - try (RestClient client = createRestClient(HttpClients.custom().addInterceptorFirst(headerExtractor).build())) { + try (RestClient client = createRestClient(new ContentEncodingHeaderExtractorConfigCallback(headerExtractor))) { try (Response response = client.performRequest("POST", "/company/employees/2", Collections.emptyMap(), SAMPLE_DOCUMENT)) { assertEquals(201, response.getStatusLine().getStatusCode()); @@ -131,4 +119,30 @@ public class NettyHttpCompressionIT extends ESIntegTestCase { return contentEncodingHeader; } } + + private static class NoContentCompressionConfigCallback extends ContentEncodingHeaderExtractorConfigCallback { + NoContentCompressionConfigCallback(ContentEncodingHeaderExtractor contentEncodingHeaderExtractor) { + super(contentEncodingHeaderExtractor); + } + + @Override + public void customizeHttpClient(HttpClientBuilder httpClientBuilder) { + super.customizeHttpClient(httpClientBuilder); + httpClientBuilder.disableContentCompression(); + } + } + + private static class ContentEncodingHeaderExtractorConfigCallback implements RestClient.HttpClientConfigCallback { + + private final ContentEncodingHeaderExtractor contentEncodingHeaderExtractor; + + ContentEncodingHeaderExtractorConfigCallback(ContentEncodingHeaderExtractor contentEncodingHeaderExtractor) { + this.contentEncodingHeaderExtractor = contentEncodingHeaderExtractor; + } + + @Override + public void customizeHttpClient(HttpClientBuilder httpClientBuilder) { + httpClientBuilder.addInterceptorFirst(contentEncodingHeaderExtractor); + } + } } diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/HttpSmokeTestCase.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/HttpSmokeTestCase.java new file mode 100644 index 00000000000..636d652feb1 --- /dev/null +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/HttpSmokeTestCase.java @@ -0,0 +1,80 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.http; + +import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.transport.MockTcpTransportPlugin; +import org.elasticsearch.transport.Netty3Plugin; + +import java.util.Collection; +import java.util.Collections; +import java.util.List; + +public abstract class HttpSmokeTestCase extends ESIntegTestCase { + + @Override + protected Settings nodeSettings(int nodeOrdinal) { + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal)) + .put("netty.assert.buglevel", false) + .put(NetworkModule.TRANSPORT_TYPE_KEY, randomFrom(Netty3Plugin.NETTY_TRANSPORT_NAME, + MockTcpTransportPlugin.MOCK_TCP_TRANSPORT_NAME)) + .put(NetworkModule.HTTP_ENABLED.getKey(), true).build(); + } + + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + return pluginList(MockTcpTransportPlugin.class, Netty3Plugin.class, BogusPlugin.class); + } + + @Override + protected Collection<Class<? extends Plugin>> transportClientPlugins() { + return pluginList(MockTcpTransportPlugin.class, Netty3Plugin.class, BogusPlugin.class); + } + + @Override + protected Settings transportClientSettings() { + return Settings.builder() + .put(super.transportClientSettings()) + .put("netty.assert.buglevel", false) + .put(NetworkModule.TRANSPORT_TYPE_KEY, randomFrom(Netty3Plugin.NETTY_TRANSPORT_NAME, + MockTcpTransportPlugin.MOCK_TCP_TRANSPORT_NAME)).build(); + } + + @Override + protected boolean ignoreExternalCluster() { + return true; + } + + + public static final class BogusPlugin extends Plugin { + // see Netty3Plugin.... this runs without the permission from the netty3 module so it will fail since reindex can't set the property + // to make it still work we disable that check but need to register the setting first + private static final Setting<Boolean> ASSERT_NETTY_BUGLEVEL = Setting.boolSetting("netty.assert.buglevel", true, + Setting.Property.NodeScope); + @Override + public List<Setting<?>> getSettings() { + return Collections.singletonList(ASSERT_NETTY_BUGLEVEL); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/plugins/ResponseHeaderPluginIT.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ResponseHeaderPluginIT.java similarity index 80% rename from core/src/test/java/org/elasticsearch/plugins/ResponseHeaderPluginIT.java rename to qa/smoke-test-http/src/test/java/org/elasticsearch/http/ResponseHeaderPluginIT.java index 6a3513416f8..482edc36702 100644 --- a/core/src/test/java/org/elasticsearch/plugins/ResponseHeaderPluginIT.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/ResponseHeaderPluginIT.java @@ -16,19 +16,18 @@ * specific language governing permissions and limitations * under the License.
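As a usage sketch: a suite built on this base class only needs to issue REST calls, since HttpSmokeTestCase above enables HTTP and picks a transport at random. The class and endpoint below are hypothetical, assuming the same imports as the surrounding tests:

public class ExampleEndpointIT extends HttpSmokeTestCase {
    // Hypothetical smoke test; HttpSmokeTestCase supplies the HTTP-enabled node settings,
    // so a subclass only performs requests and asserts on the response.
    public void testRootEndpointResponds() throws Exception {
        try (Response response = getRestClient().performRequest("GET", "/")) {
            assertEquals(200, response.getStatusLine().getStatusCode());
        }
    }
}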
*/ -package org.elasticsearch.plugins; +package org.elasticsearch.http; import org.apache.http.message.BasicHeader; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.plugins.responseheader.TestResponseHeaderPlugin; -import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; +import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import static org.hamcrest.Matchers.equalTo; @@ -36,7 +35,7 @@ import static org.hamcrest.Matchers.equalTo; * Test a rest action that sets special response headers */ @ClusterScope(scope = Scope.SUITE, supportsDedicatedMasters = false, numDataNodes = 1) -public class ResponseHeaderPluginIT extends ESIntegTestCase { +public class ResponseHeaderPluginIT extends HttpSmokeTestCase { @Override protected Settings nodeSettings(int nodeOrdinal) { return Settings.builder() @@ -45,15 +44,22 @@ public class ResponseHeaderPluginIT extends ESIntegTestCase { .build(); } + @Override + protected boolean ignoreExternalCluster() { + return true; + } + @Override protected Collection<Class<? extends Plugin>> nodePlugins() { - return pluginList(TestResponseHeaderPlugin.class); + ArrayList<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins()); + plugins.add(TestResponseHeaderPlugin.class); + return plugins; } public void testThatSettingHeadersWorks() throws Exception { ensureGreen(); try { - getRestClient().performRequest("GET", "/_protected", Collections.emptyMap(), null); + getRestClient().performRequest("GET", "/_protected"); fail("request should have failed"); } catch(ResponseException e) { Response response = e.getResponse(); @@ -61,8 +67,7 @@ public class ResponseHeaderPluginIT extends ESIntegTestCase { assertThat(response.getHeader("Secret"), equalTo("required")); } - try (Response authResponse = getRestClient().performRequest("GET", "/_protected", Collections.emptyMap(), null, - new BasicHeader("Secret", "password"))) { + try (Response authResponse = getRestClient().performRequest("GET", "/_protected", new BasicHeader("Secret", "password"))) { assertThat(authResponse.getStatusLine().getStatusCode(), equalTo(200)); assertThat(authResponse.getHeader("Secret"), equalTo("granted")); } diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/TestDeprecatedQueryBuilder.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/TestDeprecatedQueryBuilder.java new file mode 100644 index 00000000000..84d564cb2e1 --- /dev/null +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/TestDeprecatedQueryBuilder.java @@ -0,0 +1,101 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied.
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.http; + +import org.apache.lucene.search.Query; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.lucene.search.Queries; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.query.AbstractQueryBuilder; +import org.elasticsearch.index.query.QueryParseContext; +import org.elasticsearch.index.query.QueryShardContext; + +import java.io.IOException; +import java.util.Optional; + +/** + * A query that performs a match_all query, but with each index touched getting a unique deprecation warning. + * <p>
+ * This makes it easy to test multiple unique responses for a single request. + */ +public class TestDeprecatedQueryBuilder extends AbstractQueryBuilder<TestDeprecatedQueryBuilder> { + public static final String NAME = "deprecated_match_all"; + + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(TestDeprecatedQueryBuilder.class)); + + public TestDeprecatedQueryBuilder() { + // nothing to do + } + + /** + * Read from a stream. + */ + public TestDeprecatedQueryBuilder(StreamInput in) throws IOException { + super(in); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + // nothing to do + } + + @Override + protected void doXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(NAME).endObject(); + } + + public static Optional<TestDeprecatedQueryBuilder> fromXContent(QueryParseContext parseContext) throws IOException, ParsingException { + XContentParser parser = parseContext.parser(); + + if (parser.nextToken() != XContentParser.Token.END_OBJECT) { + throw new ParsingException(parser.getTokenLocation(), "[{}] query does not have any fields", NAME); + } + + return Optional.of(new TestDeprecatedQueryBuilder()); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + protected Query doToQuery(QueryShardContext context) throws IOException { + DEPRECATION_LOGGER.deprecated("[{}] query is deprecated, but used on [{}] index", NAME, context.index().getName()); + + return Queries.newMatchAllQuery(); + } + + @Override + public int doHashCode() { + return 0; + } + + @Override + protected boolean doEquals(TestDeprecatedQueryBuilder other) { + return true; + } + +} diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/TestDeprecationHeaderRestAction.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/TestDeprecationHeaderRestAction.java new file mode 100644 index 00000000000..cff7b55f9de --- /dev/null +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/TestDeprecationHeaderRestAction.java @@ -0,0 +1,108 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.
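The search body that exercises this builder is the one DeprecationHttpIT sends earlier in this change; shown here in isolation, with NAME resolving to "deprecated_match_all":

// Wrapping the query in a bool filter, as the test does; each index the search
// touches should yield one "[<index>] index" Warning response header.
String body = "{\"query\":{\"bool\":{\"filter\":[{\"" + TestDeprecatedQueryBuilder.NAME + "\":{}}]}}}";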
+ */ +package org.elasticsearch.http; + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestStatus; + +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * Enables testing {@code DeprecationRestHandler} via integration tests by guaranteeing a deprecated REST endpoint. + * <p>
+ * This adds an endpoint named /_test_cluster/deprecated_settings that touches specified settings given their names + * and returns their values. + */ +public class TestDeprecationHeaderRestAction extends BaseRestHandler { + + public static final Setting<Boolean> TEST_DEPRECATED_SETTING_TRUE1 = + Setting.boolSetting("test.setting.deprecated.true1", true, + Setting.Property.NodeScope, Setting.Property.Deprecated, Setting.Property.Dynamic); + public static final Setting<Boolean> TEST_DEPRECATED_SETTING_TRUE2 = + Setting.boolSetting("test.setting.deprecated.true2", true, + Setting.Property.NodeScope, Setting.Property.Deprecated, Setting.Property.Dynamic); + public static final Setting<Boolean> TEST_NOT_DEPRECATED_SETTING = + Setting.boolSetting("test.setting.not_deprecated", false, + Setting.Property.NodeScope, Setting.Property.Dynamic); + + private static final Map<String, Setting<?>> SETTINGS; + + static { + Map<String, Setting<?>> settingsMap = new HashMap<>(3); + + settingsMap.put(TEST_DEPRECATED_SETTING_TRUE1.getKey(), TEST_DEPRECATED_SETTING_TRUE1); + settingsMap.put(TEST_DEPRECATED_SETTING_TRUE2.getKey(), TEST_DEPRECATED_SETTING_TRUE2); + settingsMap.put(TEST_NOT_DEPRECATED_SETTING.getKey(), TEST_NOT_DEPRECATED_SETTING); + + SETTINGS = Collections.unmodifiableMap(settingsMap); + } + + public static final String DEPRECATED_ENDPOINT = "[/_test_cluster/deprecated_settings] exists for deprecated tests"; + public static final String DEPRECATED_USAGE = "[deprecated_settings] usage is deprecated. use [settings] instead"; + + @Inject + public TestDeprecationHeaderRestAction(Settings settings, RestController controller) { + super(settings); + + controller.registerAsDeprecatedHandler(RestRequest.Method.GET, "/_test_cluster/deprecated_settings", this, + DEPRECATED_ENDPOINT, deprecationLogger); + } + + @SuppressWarnings("unchecked") // List<String> casts + @Override + public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { + final List<String> settings; + + try (XContentParser parser = XContentFactory.xContent(request.content()).createParser(request.content())) { + final Map<String, Object> source = parser.map(); + + if (source.containsKey("deprecated_settings")) { + deprecationLogger.deprecated(DEPRECATED_USAGE); + + settings = (List<String>)source.get("deprecated_settings"); + } else { + settings = (List<String>)source.get("settings"); + } + } + + final XContentBuilder builder = channel.newBuilder(); + + builder.startObject().startArray("settings"); + for (String setting : settings) { + builder.startObject().field(setting, SETTINGS.get(setting).getRaw(this.settings)).endObject(); + } + builder.endArray().endObject(); + + channel.sendResponse(new BytesRestResponse(RestStatus.OK, builder)); + } +} diff --git a/qa/smoke-test-http/src/test/java/org/elasticsearch/http/TestDeprecationPlugin.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/TestDeprecationPlugin.java new file mode 100644 index 00000000000..d047713f0c7 --- /dev/null +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/TestDeprecationPlugin.java @@ -0,0 +1,57 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.http; + +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.plugins.ActionPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.SearchPlugin; +import org.elasticsearch.rest.RestHandler; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import static java.util.Collections.singletonList; + +/** + * Adds {@link TestDeprecationHeaderRestAction} for testing deprecation requests via HTTP. + */ +public class TestDeprecationPlugin extends Plugin implements ActionPlugin, SearchPlugin { + + @Override + public List<Class<? extends RestHandler>> getRestHandlers() { + return Collections.singletonList(TestDeprecationHeaderRestAction.class); + } + + @Override + public List<Setting<?>> getSettings() { + return Arrays.asList( + TestDeprecationHeaderRestAction.TEST_DEPRECATED_SETTING_TRUE1, + TestDeprecationHeaderRestAction.TEST_DEPRECATED_SETTING_TRUE2, + TestDeprecationHeaderRestAction.TEST_NOT_DEPRECATED_SETTING); + } + + @Override + public List<QuerySpec<?>> getQueries() { + return singletonList(new QuerySpec<>(TestDeprecatedQueryBuilder.NAME, TestDeprecatedQueryBuilder::new, + TestDeprecatedQueryBuilder::fromXContent)); + } + +} diff --git a/core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderPlugin.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/TestResponseHeaderPlugin.java similarity index 96% rename from core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderPlugin.java rename to qa/smoke-test-http/src/test/java/org/elasticsearch/http/TestResponseHeaderPlugin.java index 9dfd5b6a93a..398f990a5cf 100644 --- a/core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderPlugin.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/TestResponseHeaderPlugin.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.plugins.responseheader; +package org.elasticsearch.http; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; diff --git a/core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderRestAction.java b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/TestResponseHeaderRestAction.java similarity index 91% rename from core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderRestAction.java rename to qa/smoke-test-http/src/test/java/org/elasticsearch/http/TestResponseHeaderRestAction.java index 39432bd01ea..a321fc0a457 100644 --- a/core/src/test/java/org/elasticsearch/plugins/responseheader/TestResponseHeaderRestAction.java +++ b/qa/smoke-test-http/src/test/java/org/elasticsearch/http/TestResponseHeaderRestAction.java @@ -16,9 +16,9 @@ * specific language governing permissions and limitations * under the License.
*/ -package org.elasticsearch.plugins.responseheader; +package org.elasticsearch.http; -import org.elasticsearch.client.Client; +import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; @@ -32,13 +32,13 @@ import org.elasticsearch.rest.RestStatus; public class TestResponseHeaderRestAction extends BaseRestHandler { @Inject - public TestResponseHeaderRestAction(Settings settings, RestController controller, Client client) { - super(settings, client); + public TestResponseHeaderRestAction(Settings settings, RestController controller) { + super(settings); controller.registerHandler(RestRequest.Method.GET, "/_protected", this); } @Override - public void handleRequest(RestRequest request, RestChannel channel, Client client) { + public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) { if ("password".equals(request.header("Secret"))) { RestResponse response = new BytesRestResponse(RestStatus.OK, "Access granted"); response.addHeader("Secret", "granted"); diff --git a/qa/smoke-test-ingest-with-all-dependencies/build.gradle b/qa/smoke-test-ingest-with-all-dependencies/build.gradle index 9c9943a1712..df90bf5b982 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/build.gradle +++ b/qa/smoke-test-ingest-with-all-dependencies/build.gradle @@ -29,7 +29,7 @@ dependencies { integTest { cluster { - plugin 'ingest-geoip', project(':plugins:ingest-geoip') + plugin ':plugins:ingest-geoip' setting 'script.inline', 'true' setting 'path.scripts', "${project.buildDir}/resources/test/scripts" } diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/10_pipeline_with_mustache_templates.yaml b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/10_pipeline_with_mustache_templates.yaml index ba07943d3d9..b0a729a6299 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/10_pipeline_with_mustache_templates.yaml +++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/10_pipeline_with_mustache_templates.yaml @@ -231,14 +231,12 @@ { "foreach": { "field": "values", - "processors": [ - { + "processor": { "append": { "field": "values_flat", "value": "{{_value.key}}_{{_value.value}}" } - } - ] + } } } ] @@ -326,3 +324,29 @@ - length: { docs.0.processor_results.1.doc._source: 2 } - match: { docs.0.processor_results.1.doc._source.foo: "bar" } - match: { docs.0.processor_results.1.doc._source.error: "processor rename-status [rename]: field [status] doesn't exist" } + +--- +"Test invalid mustache template": + - do: + cluster.health: + wait_for_status: green + + - do: + catch: request + ingest.put_pipeline: + id: "my_pipeline_1" + body: > + { + "description": "_description", + "processors": [ + { + "set" : { + "field" : "field4", + "value": "{{#join}}{{/join}}" + } + } + ] + } + - match: { error.header.processor_type: "set" } + - match: { error.type: "general_script_exception" } + - match: { error.reason: "Failed to compile inline script [{{#join}}{{/join}}] using lang [mustache]" } diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/20_combine_processors.yaml b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/20_combine_processors.yaml index 7ccaa64b9f4..9a52979b930 100644 --- 
a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/20_combine_processors.yaml +++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/20_combine_processors.yaml @@ -82,13 +82,11 @@ { "foreach" : { "field" : "friends", - "processors" : [ - { + "processor" : { "remove" : { "field" : "_value.id" } - } - ] + } } }, { @@ -106,13 +104,11 @@ { "foreach" : { "field" : "address", - "processors" : [ - { + "processor" : { "trim" : { "field" : "_value" } - } - ] + } } }, { diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/30_with_ingest.yaml b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/30_update_by_query_with_ingest.yaml similarity index 100% rename from qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/30_with_ingest.yaml rename to qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/30_update_by_query_with_ingest.yaml diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/40_with_ingest.yaml b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/40_reindex_with_ingest.yaml similarity index 100% rename from qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/40_with_ingest.yaml rename to qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/40_reindex_with_ingest.yaml diff --git a/qa/smoke-test-plugins/build.gradle b/qa/smoke-test-plugins/build.gradle index bc8eace704e..ab69b02fc8c 100644 --- a/qa/smoke-test-plugins/build.gradle +++ b/qa/smoke-test-plugins/build.gradle @@ -25,8 +25,7 @@ ext.pluginsCount = 0 project.rootProject.subprojects.findAll { it.parent.path == ':plugins' }.each { subproj -> integTest { cluster { - // need to get a non-decorated project object, so must re-lookup the project by path - plugin subproj.name, project(subproj.path) + plugin subproj.path } } pluginsCount += 1 diff --git a/qa/smoke-test-reindex-with-painless/src/test/resources/rest-api-spec/test/reindex/10_script.yaml b/qa/smoke-test-reindex-with-painless/src/test/resources/rest-api-spec/test/reindex/10_script.yaml index 0e99d862fd1..a56eb036c6d 100644 --- a/qa/smoke-test-reindex-with-painless/src/test/resources/rest-api-spec/test/reindex/10_script.yaml +++ b/qa/smoke-test-reindex-with-painless/src/test/resources/rest-api-spec/test/reindex/10_script.yaml @@ -90,9 +90,6 @@ mappings: tweet: _parent: { type: "user" } - - do: - cluster.health: - wait_for_status: yellow - do: index: @@ -192,9 +189,6 @@ mappings: tweet: _parent: { type: "user" } - - do: - cluster.health: - wait_for_status: yellow - do: index: diff --git a/qa/smoke-test-reindex-with-painless/src/test/resources/rest-api-spec/test/reindex/40_search_failures.yaml b/qa/smoke-test-reindex-with-painless/src/test/resources/rest-api-spec/test/reindex/40_search_failures.yaml index 17e915fdc19..a982609e3a1 100644 --- a/qa/smoke-test-reindex-with-painless/src/test/resources/rest-api-spec/test/reindex/40_search_failures.yaml +++ b/qa/smoke-test-reindex-with-painless/src/test/resources/rest-api-spec/test/reindex/40_search_failures.yaml @@ -17,8 +17,9 @@ index: source query: script: - lang: painless - script: throw new IllegalArgumentException("Cats!") + script: + lang: painless + inline: throw new IllegalArgumentException("Cats!") dest: index: dest - match: {created: 0} diff --git 
a/qa/smoke-test-reindex-with-painless/src/test/resources/rest-api-spec/test/update_by_query/40_search_failure.yaml b/qa/smoke-test-reindex-with-painless/src/test/resources/rest-api-spec/test/update_by_query/40_search_failure.yaml index d1ba9808fba..9960e2cd8cc 100644 --- a/qa/smoke-test-reindex-with-painless/src/test/resources/rest-api-spec/test/update_by_query/40_search_failure.yaml +++ b/qa/smoke-test-reindex-with-painless/src/test/resources/rest-api-spec/test/update_by_query/40_search_failure.yaml @@ -16,8 +16,9 @@ body: query: script: - lang: painless - script: throw new IllegalArgumentException("Cats!") + script: + lang: painless + inline: throw new IllegalArgumentException("Cats!") - match: {updated: 0} - match: {version_conflicts: 0} - match: {batches: 0} diff --git a/qa/vagrant/build.gradle b/qa/vagrant/build.gradle index 878e9c5ef7f..86523a734ad 100644 --- a/qa/vagrant/build.gradle +++ b/qa/vagrant/build.gradle @@ -30,7 +30,7 @@ List availableBoxes = [ 'centos-6', 'centos-7', 'debian-8', - 'fedora-22', + 'fedora-24', 'oel-6', 'oel-7', 'opensuse-13', diff --git a/qa/vagrant/src/test/resources/packaging/scripts/20_tar_package.bats b/qa/vagrant/src/test/resources/packaging/scripts/20_tar_package.bats index f6f4d3e76d0..eca0c650901 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/20_tar_package.bats +++ b/qa/vagrant/src/test/resources/packaging/scripts/20_tar_package.bats @@ -97,7 +97,7 @@ setup() { stop_elasticsearch_service } -@test "[TAR]" start Elasticsearch with custom JVM options { +@test "[TAR] start Elasticsearch with custom JVM options" { local es_java_opts=$ES_JAVA_OPTS local es_jvm_options=$ES_JVM_OPTIONS local temp=`mktemp -d` @@ -115,7 +115,7 @@ setup() { export ES_JAVA_OPTS=$es_java_opts } -@test "[TAR]" start Elasticsearch with unquoted JSON option { +@test "[TAR] start Elasticsearch with unquoted JSON option" { local es_java_opts=$ES_JAVA_OPTS local es_jvm_options=$ES_JVM_OPTIONS local temp=`mktemp -d` @@ -133,6 +133,6 @@ setup() { export ES_JAVA_OPTS=$es_java_opts } -@test "[TAR]" remove tar { +@test "[TAR] remove tar" { rm -rf "/tmp/elasticsearch" } diff --git a/qa/vagrant/src/test/resources/packaging/scripts/example/scripts/is_guide.mustache b/qa/vagrant/src/test/resources/packaging/scripts/example/scripts/is_guide.mustache index faf5b4fdbe2..a72544f7820 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/example/scripts/is_guide.mustache +++ b/qa/vagrant/src/test/resources/packaging/scripts/example/scripts/is_guide.mustache @@ -1,7 +1,9 @@ { "query": { "script": { - "script_file": "is_guide" + "script": { + "file": "is_guide" + } } } } diff --git a/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash b/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash index c17df96937a..b44e5885ff8 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/module_and_plugin_test_cases.bash @@ -210,7 +210,7 @@ fi } @test "[$GROUP] install discovery-azure-classic plugin" { - install_and_check_plugin discovery azure azure-core-*.jar + install_and_check_plugin discovery azure-classic azure-core-*.jar } @test "[$GROUP] install discovery-ec2 plugin" { @@ -228,6 +228,10 @@ fi install_and_check_plugin ingest geoip geoip2-*.jar jackson-annotations-*.jar jackson-databind-*.jar maxmind-db-*.jar } +@test "[$GROUP] install ingest-user-agent plugin" { + install_and_check_plugin ingest user-agent +} + @test "[$GROUP] check 
ingest-common module" { check_module ingest-common jcodings-*.jar joni-*.jar } @@ -357,6 +361,10 @@ fi remove_plugin ingest-geoip } +@test "[$GROUP] remove ingest-user-agent plugin" { + remove_plugin ingest-user-agent +} + @test "[$GROUP] remove javascript plugin" { remove_plugin lang-javascript } diff --git a/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash b/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash index c4dc8c96f58..37daf0ae5b3 100644 --- a/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash +++ b/qa/vagrant/src/test/resources/packaging/scripts/packaging_test_utils.bash @@ -475,7 +475,9 @@ run_elasticsearch_tests() { curl -s -XPOST 'http://localhost:9200/library/book/_count?pretty' -d '{ "query": { "script": { - "script_file": "is_guide" + "script": { + "file": "is_guide" + } } } }' | grep \"count\"\ :\ 1 diff --git a/qa/vagrant/versions b/qa/vagrant/versions index 5f6e0edf99c..98cb5bf8195 100644 --- a/qa/vagrant/versions +++ b/qa/vagrant/versions @@ -11,3 +11,4 @@ 2.3.1 2.3.2 2.3.3 +2.3.4 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.allocation_explain.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.allocation_explain.json index 505c163497e..26b24cfb697 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.allocation_explain.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.allocation_explain.json @@ -10,6 +10,10 @@ "include_yes_decisions": { "type": "boolean", "description": "Return 'YES' decisions in explanation (default: false)" + }, + "include_disk_info": { + "type": "boolean", + "description": "Return information about disk usage and shard sizes (default: false)" } } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.health.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.health.json index b622d01e60c..c00f8635773 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.health.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.health.json @@ -38,6 +38,11 @@ "type" : "string", "description" : "Wait until the specified number of nodes is available" }, + "wait_for_events": { + "type" : "enum", + "options" : ["immediate", "urgent", "high", "normal", "low", "languid"], + "description" : "Wait until all currently queued events with the given priorty are processed" + }, "wait_for_relocating_shards": { "type" : "number", "description" : "Wait until the specified number of relocating shards is finished" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json index d1f10dbfbfc..e6ebc7628b7 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json @@ -195,7 +195,7 @@ "requests_per_second": { "type": "float", "default": 0, - "description": "The throttle for this request in sub-requests per second. 0 means set no throttle." + "description": "The throttle for this request in sub-requests per second. -1 means set no throttle." 
} } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create.json index bdac6d9a9ab..b0267eae3a3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.create.json @@ -13,6 +13,10 @@ } }, "params": { + "wait_for_active_shards": { + "type" : "string", + "description" : "Set the number of active shards to wait for before the operation returns." + }, "timeout": { "type" : "time", "description" : "Explicit operation timeout" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json index 218871a9765..97580182ea1 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.rollover.json @@ -25,6 +25,10 @@ "master_timeout": { "type" : "time", "description" : "Specify timeout for connection to master" + }, + "wait_for_active_shards": { + "type" : "string", + "description" : "Set the number of active shards to wait for on the newly created rollover index before the operation returns." } } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.shrink.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.shrink.json index 633e9e16093..5ef943eacba 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.shrink.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.shrink.json @@ -25,6 +25,10 @@ "master_timeout": { "type" : "time", "description" : "Specify timeout for connection to master" + }, + "wait_for_active_shards": { + "type" : "string", + "description" : "Set the number of active shards to wait for on the shrunken index before the operation returns." } } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/template.msearch.json b/rest-api-spec/src/main/resources/rest-api-spec/api/msearch_template.json similarity index 97% rename from rest-api-spec/src/main/resources/rest-api-spec/api/template.msearch.json rename to rest-api-spec/src/main/resources/rest-api-spec/api/msearch_template.json index 379b7bdf362..39aa53b2572 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/template.msearch.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/msearch_template.json @@ -1,5 +1,5 @@ { - "template.msearch": { + "msearch_template": { "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/current/search-template.html", "methods": ["GET", "POST"], "url": { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.json b/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.json index 80210a2048e..1ff8f7e03c4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.json @@ -29,7 +29,7 @@ "requests_per_second": { "type": "float", "default": 0, - "description": "The throttle for this request in sub-requests per second. 0 means set no throttle." + "description": "The throttle to set on this request in sub-requests per second. -1 means set no throttle as does \"unlimited\" which is the only non-float this accepts." 
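To illustrate the throttle wording above, here is a sketch of a reindex request that explicitly disables throttling (the index names are hypothetical):

  - do:
      reindex:
        requests_per_second: -1
        body:
          source:
            index: src
          dest:
            index: dst

Passing -1 turns the throttle off entirely; the string "unlimited" is accepted as an equivalent spelling.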
} } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.rethrottle.json b/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.rethrottle.json index 10d6321212d..ddb7dcc4dc9 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.rethrottle.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/reindex.rethrottle.json @@ -15,7 +15,7 @@ "requests_per_second": { "type": "float", "required": true, - "description": "The throttle to set on this request in sub-requests per second. 0 means set no throttle. As does \"unlimited\". Otherwise it must be a float." + "description": "The throttle to set on this request in sub-requests per second. -1 means set no throttle as does \"unlimited\" which is the only non-float this accepts." } } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search.json index d2b9b8cf9b4..21fda8dc805 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search.json @@ -38,13 +38,17 @@ "type" : "boolean", "description" : "Specify whether to return detailed information about score computation as part of a hit" }, - "fields": { + "stored_fields": { "type" : "list", - "description" : "A comma-separated list of fields to return as part of a hit" + "description" : "A comma-separated list of stored fields to return as part of a hit" + }, + "docvalue_fields": { + "type" : "list", + "description" : "A comma-separated list of fields to return as the docvalue representation of a field for each hit" }, "fielddata_fields": { "type" : "list", - "description" : "A comma-separated list of fields to return as the field data representation of a field for each hit" + "description" : "A comma-separated list of fields to return as the docvalue representation of a field for each hit" }, "from": { "type" : "number", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/template.search.json b/rest-api-spec/src/main/resources/rest-api-spec/api/template.search.json deleted file mode 100644 index 6a2a8c1d7e2..00000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/template.search.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "template.search": { - "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/current/search-template.html", - "methods": ["GET", "POST"], - "url": { - "path": "/_search/template", - "paths": ["/_search/template", "/{index}/_search/template", "/{index}/{type}/_search/template"], - "parts": { - "index": { - "type" : "list", - "description" : "A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices" - }, - "type": { - "type" : "list", - "description" : "A comma-separated list of document types to search; leave empty to perform the operation on all types" - } - }, - "params" : { - "ignore_unavailable": { - "type" : "boolean", - "description" : "Whether specified concrete indices should be ignored when unavailable (missing or closed)" - }, - "allow_no_indices": { - "type" : "boolean", - "description" : "Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)" - }, - "expand_wildcards": { - "type" : "enum", - "options" : ["open","closed","none","all"], - "default" : "open", - "description" : "Whether to expand wildcard expression to concrete indices that are open, closed or both." 
- }, - "preference": { - "type" : "string", - "description" : "Specify the node or shard the operation should be performed on (default: random)" - }, - "routing": { - "type" : "list", - "description" : "A comma-separated list of specific routing values" - }, - "scroll": { - "type" : "duration", - "description" : "Specify how long a consistent view of the index should be maintained for scrolled search" - }, - "search_type": { - "type" : "enum", - "options" : ["query_then_fetch", "query_and_fetch", "dfs_query_then_fetch", "dfs_query_and_fetch"], - "description" : "Search operation type" - } - } - }, - "body": { - "description": "The search definition template and its params" - } - } -} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/update.json b/rest-api-spec/src/main/resources/rest-api-spec/api/update.json index 4a3f134301d..d91b9003148 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/update.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/update.json @@ -53,16 +53,6 @@ "type": "string", "description": "Specific routing value" }, - "script": { - "description": "The URL-encoded script definition (instead of using request body)" - }, - "script_id": { - "description": "The id of a stored script" - }, - "scripted_upsert": { - "type": "boolean", - "description": "True if the script referenced in script or script_id should be called to perform inserts - defaults to false" - }, "timeout": { "type": "time", "description": "Explicit operation timeout" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json index 313dbe00c46..b945c3bc659 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json @@ -203,7 +203,7 @@ "requests_per_second": { "type": "float", "default": 0, - "description": "The throttle for this request in sub-requests per second. 0 means set no throttle." + "description": "The throttle to set on this request in sub-requests per second. -1 means set no throttle as does \"unlimited\" which is the only non-float this accepts." } } }, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/README.asciidoc b/rest-api-spec/src/main/resources/rest-api-spec/test/README.asciidoc index 688d8cbdc5b..4e88cef4c9f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/README.asciidoc +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/README.asciidoc @@ -20,6 +20,7 @@ Test file structure A YAML test file consists of: * an optional `setup` section, followed by +* an optional `teardown` section, followed by * one or more test sections For instance: @@ -28,6 +29,10 @@ For instance: - do: .... - do: .... + --- + teardown: + - do: .... + --- "First test": - do: ... @@ -42,6 +47,11 @@ For instance: A `setup` section contains a list of commands to run before each test section in order to set up the same environment for each test section. +A `teardown` section contains a list of commands to run after each test +section in order to clean up the environment for the next test section. This +may be needed for modifications made by the test that are not cleared by the +deletion of indices and templates. + A test section represents an independent test, containing multiple `do` statements and assertions.
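For illustration, a complete test file using the new `teardown` section might look like the sketch below. The pipeline id and processor are hypothetical; an ingest pipeline is used here precisely because deleting indices and templates would not clean it up:

  ---
  setup:
    - do:
        ingest.put_pipeline:
          id: "my_pipeline"
          body:
            description: "test pipeline"
            processors:
              - set:
                  field: "foo"
                  value: "bar"
  ---
  teardown:
    - do:
        ingest.delete_pipeline:
          id: "my_pipeline"
  ---
  "Pipeline exists":
    - do:
        ingest.get_pipeline:
          id: "my_pipeline"
    - match: { my_pipeline.description: "test pipeline" }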
The contents of a test section must be run in order, but individual test sections may be run in any order, as follows: @@ -49,9 +59,8 @@ order, but individual test sections may be run in any order, as follows: 1. run `setup` (if any) 2. reset the `response` var and the `stash` (see below) 3. run test contents -3. run teardown - -The `teardown` should delete all indices and all templates. +4. run `teardown` (if any) +5. delete all indices and all templates Dot notation: ------------- diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.aliases/20_headers.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.aliases/20_headers.yaml index b814856144b..c5a806c5615 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.aliases/20_headers.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.aliases/20_headers.yaml @@ -1,7 +1,7 @@ --- "Simple alias with yaml body through Accept header": - skip: - features: headers + features: ["headers", "yaml"] - do: indices.create: @@ -17,12 +17,8 @@ headers: Accept: application/yaml - - match: - $body: | - /^---\n - -\s+alias:\s+"test_alias"\s+ - index:\s+"test"\s+ - filter:\s+"-"\s+ - routing.index:\s+"-"\s+ - routing.search:\s+"-"\s+$/ - + - match: {0.alias: test_alias} + - match: {0.index: test} + - match: {0.filter: "-"} + - match: {0.routing\.index: "-"} + - match: {0.routing\.search: "-"} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.aliases/30_json.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.aliases/30_json.yaml new file mode 100644 index 00000000000..178b77ce60d --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.aliases/30_json.yaml @@ -0,0 +1,21 @@ +--- +"Simple alias with json body through format argument": + + - do: + indices.create: + index: test + + - do: + indices.put_alias: + index: test + name: test_alias + + - do: + cat.aliases: + format: json + + - match: {0.alias: test_alias} + - match: {0.index: test} + - match: {0.filter: "-"} + - match: {0.routing\.index: "-"} + - match: {0.routing\.search: "-"} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.aliases/30_yaml.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.aliases/30_yaml.yaml deleted file mode 100644 index c892891f08f..00000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.aliases/30_yaml.yaml +++ /dev/null @@ -1,29 +0,0 @@ ---- -"Simple alias with yaml body through format argument": - - - skip: - features: yaml - - - do: - indices.create: - index: test - - - do: - indices.put_alias: - index: test - name: test_alias - - - do: - cat.aliases: - format: yaml - - - match: - $body: | - /^---\n - -\s+alias:\s+"test_alias"\s+ - index:\s+"test"\s+ - filter:\s+"-"\s+ - routing.index:\s+"-"\s+ - routing.search:\s+"-"\s+$/ - - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.allocation/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.allocation/10_basic.yaml index c1e25c88b95..7f724831bd8 100755 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.allocation/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.allocation/10_basic.yaml @@ -46,10 +46,6 @@ indices.create: index: test - - do: - cluster.health: - wait_for_status: yellow - - do: cat.allocation: {} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.indices/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.indices/10_basic.yaml index
51f8fe9ed4c..d2c72f5a604 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.indices/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.indices/10_basic.yaml @@ -15,9 +15,6 @@ settings: number_of_shards: "1" number_of_replicas: "0" - - do: - cluster.health: - wait_for_status: yellow - do: cat.indices: {} @@ -26,6 +23,7 @@ /^(green \s+ open \s+ index1 \s+ + ([a-zA-Z0-9=/_+]|[\\\-]){22} \s+ 1 \s+ 0 \s+ 0 \s+ @@ -62,6 +60,7 @@ /^( \s+ close \s+ index1 \s+ + ([a-zA-Z0-9=/_+]|[\\\-]){22} \s+ \s+ \s+ \s+ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.recovery/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.recovery/10_basic.yaml index fc596dd5792..a759efec9b8 100755 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.recovery/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.recovery/10_basic.yaml @@ -15,9 +15,6 @@ id: 1 body: { foo: bar } refresh: true - - do: - cluster.health: - wait_for_status: yellow - do: cat.recovery: h: i,s,t,ty,st,shost,thost,rep,snap,f,fr,fp,tf,b,br,bp,tb,to,tor,top diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yaml index 28da3be4511..ebdffd03ed6 100755 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yaml @@ -139,9 +139,6 @@ settings: number_of_shards: "5" number_of_replicas: "1" - - do: - cluster.health: - wait_for_status: yellow - do: cat.shards: {} @@ -158,7 +155,6 @@ number_of_replicas: "0" - do: cluster.health: - wait_for_status: yellow wait_for_relocating_shards: 0 - do: @@ -185,7 +181,6 @@ shared_filesystem: false - do: cluster.health: - wait_for_status: yellow wait_for_relocating_shards: 0 - do: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.allocation_explain/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.allocation_explain/10_basic.yaml index 0163ffae3ef..f0195788e07 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.allocation_explain/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.allocation_explain/10_basic.yaml @@ -9,10 +9,6 @@ indices.create: index: test - - do: - cluster.health: - wait_for_status: yellow - - do: cluster.state: metric: [ master_node ] @@ -49,10 +45,6 @@ index: test body: { "index.number_of_shards": 1, "index.number_of_replicas": 9 } - - do: - cluster.health: - wait_for_status: yellow - - do: cluster.state: metric: [ master_node ] @@ -60,7 +52,8 @@ - set: {master_node: node_id} - do: - cluster.allocation_explain: {} + cluster.allocation_explain: + include_disk_info: true - match: { assigned: false } - match: { unassigned_info.reason: "INDEX_CREATED" } @@ -68,6 +61,7 @@ - match: { shard.index: "test" } - match: { shard.id: 0 } - match: { shard.primary: false } + - is_true: cluster_info # - is_true: nodes.$node_id.node_name # - match: { nodes.$node_id.node_attributes.testattr: "test" } # - match: { nodes.$node_id.node_attributes.portsfile: "true" } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.health/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.health/10_basic.yaml index ba1881e4239..f803f3d589c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.health/10_basic.yaml +++ 
b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.health/10_basic.yaml @@ -46,7 +46,6 @@ index: test_index - do: cluster.health: - wait_for_status: yellow level: indices - is_true: indices diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/create/50_parent.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/create/50_parent.yaml index 44f1ed880f4..cac5387370d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/create/50_parent.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/create/50_parent.yaml @@ -9,10 +9,6 @@ test: _parent: { type: "foo" } - - do: - cluster.health: - wait_for_status: yellow - - do: catch: /routing_missing_exception/ create: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/create/60_refresh.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/create/60_refresh.yaml index 90dc28bcfc0..5cdc1a20583 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/create/60_refresh.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/create/60_refresh.yaml @@ -8,10 +8,6 @@ settings: index.refresh_interval: -1 number_of_replicas: 0 - - do: - cluster.health: - wait_for_status: yellow - - do: create: index: test_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/30_routing.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/30_routing.yaml index 857cf76a527..6f67b3a03f4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/30_routing.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/30_routing.yaml @@ -14,9 +14,6 @@ id: 1 routing: 5 body: { foo: bar } - - do: - cluster.health: - wait_for_status: yellow - do: catch: missing diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/40_parent.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/40_parent.yaml index cd250de3310..d9aa6870460 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/40_parent.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/40_parent.yaml @@ -9,9 +9,6 @@ mappings: test: _parent: { type: "foo" } - - do: - cluster.health: - wait_for_status: yellow - do: index: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/exists/30_parent.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/exists/30_parent.yaml index 0f84a1c07d3..91fdf027c13 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/exists/30_parent.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/exists/30_parent.yaml @@ -7,9 +7,6 @@ setup: mappings: test: _parent: { type: "foo" } - - do: - cluster.health: - wait_for_status: yellow --- "Parent": diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/10_basic.yaml index 1a6c57848e0..1923377ba83 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/explain/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/explain/10_basic.yaml @@ -5,10 +5,6 @@ setup: body: aliases: alias_1: {} - - do: - cluster.health: - wait_for_status: yellow - - do: index: index: test_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/field_stats/10_basics.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/field_stats/10_basics.yaml index eb1d1c6f758..35cc19224ec 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/field_stats/10_basics.yaml +++ 
b/rest-api-spec/src/main/resources/rest-api-spec/test/field_stats/10_basics.yaml @@ -54,6 +54,7 @@ setup: - match: { indices._all.fields.foo.doc_count: 2 } - match: { indices._all.fields.foo.min_value: "babar" } - match: { indices._all.fields.foo.max_value: "bar" } + - match: { indices._all.fields.foo.type: "string" } - is_false: indices._all.fields.foo.min_value_as_string - is_false: indices._all.fields.foo.max_value_as_string - match: { indices._all.fields.foo.searchable: true } @@ -66,6 +67,7 @@ setup: - match: { indices._all.fields.number.min_value_as_string: "123" } - match: { indices._all.fields.number.max_value: 456 } - match: { indices._all.fields.number.max_value_as_string: "456" } + - match: { indices._all.fields.number.type: "integer" } - is_false: conflicts --- @@ -83,6 +85,7 @@ setup: - is_false: indices.test_1.fields.foo.max_value_as_string - match: { indices.test_1.fields.foo.searchable: true } - match: { indices.test_1.fields.foo.aggregatable: false } + - match: { indices.test_1.fields.foo.type: "string" } - match: { indices.test_1.fields.number.max_doc: 1 } - match: { indices.test_1.fields.number.doc_count: 1 } - match: { indices.test_1.fields.number.searchable: true } @@ -91,14 +94,17 @@ setup: - match: { indices.test_1.fields.number.min_value_as_string: "123" } - match: { indices.test_1.fields.number.max_value: 123 } - match: { indices.test_1.fields.number.max_value_as_string: "123" } + - match: { indices.test_1.fields.number.type: "integer" } - match: { indices.test_2.fields.foo.max_doc: 1 } - match: { indices.test_2.fields.foo.doc_count: 1 } - match: { indices.test_2.fields.foo.min_value: "babar" } - match: { indices.test_2.fields.foo.max_value: "babar" } + - match: { indices.test_2.fields.foo.type: "string" } - is_false: indices.test_2.fields.foo.min_value_as_string - is_false: indices.test_2.fields.foo.max_value_as_string - match: { indices.test_2.fields.foo.searchable: true } - match: { indices.test_2.fields.foo.aggregatable: false } + - match: { indices.test_2.fields.foo.type: "string" } - match: { indices.test_2.fields.number.max_doc: 1 } - match: { indices.test_2.fields.number.doc_count: 1 } - match: { indices.test_2.fields.number.searchable: true } @@ -107,6 +113,7 @@ setup: - match: { indices.test_2.fields.number.min_value_as_string: "456" } - match: { indices.test_2.fields.number.max_value: 456 } - match: { indices.test_2.fields.number.max_value_as_string: "456" } + - match: { indices.test_2.fields.number.type: "integer" } - is_false: conflicts --- @@ -124,6 +131,7 @@ setup: - match: { indices.test_1.fields.foo.aggregatable: false } - match: { indices.test_1.fields.foo.min_value: "bar" } - match: { indices.test_1.fields.foo.max_value: "bar" } + - match: { indices.test_1.fields.foo.type: "string" } - is_false: indices.test_1.fields.number - is_false: conflicts @@ -156,6 +164,7 @@ setup: - match: { indices._all.fields.foo.max_value: "bar" } - match: { indices._all.fields.foo.searchable: true } - match: { indices._all.fields.foo.aggregatable: false } + - match: { indices._all.fields.foo.type: "string" } - match: { indices._all.fields.number.max_doc: 2 } - match: { indices._all.fields.number.doc_count: 2 } - match: { indices._all.fields.number.searchable: true } @@ -164,5 +173,6 @@ setup: - match: { indices._all.fields.number.min_value_as_string: "123" } - match: { indices._all.fields.number.max_value: 456 } - match: { indices._all.fields.number.max_value_as_string: "456" } - - match: { conflicts.bar: "Field [bar] of type [whole-number] conflicts with existing 
field of type [text] in other index." } + - match: { indices._all.fields.number.type: "integer" } + - match: { conflicts.bar: "Field [bar] of type [integer] conflicts with existing field of type [string] in other index." } - is_false: indices._all.fields.bar diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/30_parent.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/30_parent.yaml index b9a0f46f900..4a842cee2e0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/30_parent.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/30_parent.yaml @@ -7,9 +7,6 @@ setup: mappings: test: _parent: { type: "foo" } - - do: - cluster.health: - wait_for_status: yellow - do: index: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/50_with_headers.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/50_with_headers.yaml index 1bb031f0878..b88dbaafc4f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/50_with_headers.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/50_with_headers.yaml @@ -1,7 +1,7 @@ --- "REST test with headers": - skip: - features: headers + features: ["headers", "yaml"] - do: index: @@ -18,13 +18,9 @@ type: _all id: 1 - - match: - $body: | - /^---\n - _index:\s+\"test_1"\n - _type:\s+"test"\n - _id:\s+"1"\n - _version:\s+1\n - found:\s+true\n - _source:\n - \s+body:\s+"foo"\n$/ + - match: {_index: "test_1"} + - match: {_type: "test"} + - match: {_id: "1"} + - match: {_version: 1} + - match: {found: true} + - match: { _source: { body: foo }} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/30_parent.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/30_parent.yaml index 35edb909131..8c1088e19bb 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/30_parent.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/30_parent.yaml @@ -7,9 +7,6 @@ setup: mappings: test: _parent: { type: "foo" } - - do: - cluster.health: - wait_for_status: yellow - do: index: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/85_source_missing.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/85_source_missing.yaml index 370f68d9504..6ec261ac61d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/85_source_missing.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get_source/85_source_missing.yaml @@ -7,9 +7,6 @@ setup: mappings: test: _source: { enabled: false } - - do: - cluster.health: - wait_for_status: yellow - do: index: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/50_parent.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/50_parent.yaml index 59e166c99a2..916e1ac3520 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/50_parent.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/50_parent.yaml @@ -8,9 +8,6 @@ mappings: test: _parent: { type: "foo" } - - do: - cluster.health: - wait_for_status: yellow - do: catch: /routing_missing_exception/ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/60_refresh.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/60_refresh.yaml index 4ee26411432..2175128debb 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/60_refresh.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/60_refresh.yaml @@ -8,9 +8,6 @@ settings: 
index.refresh_interval: -1 number_of_replicas: 0 - - do: - cluster.health: - wait_for_status: yellow - do: index: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/10_analyze.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/10_analyze.yaml index 10dc63cd109..35d4a2b5222 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/10_analyze.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.analyze/10_analyze.yaml @@ -35,10 +35,6 @@ setup: text: type: text analyzer: whitespace - - do: - cluster.health: - wait_for_status: yellow - - do: indices.analyze: @@ -99,3 +95,39 @@ setup: - match: { detail.tokenfilters.0.tokens.2.token: troubl } - match: { detail.tokenfilters.0.tokens.2.keyword: false } +--- +"Custom filter in request": + - do: + indices.analyze: + body: { "text": "Foo Bar Buzz", "filter": ["lowercase", { "type": "stop", "stopwords": ["foo", "buzz"]}], "tokenizer": whitespace, "explain": true } + - length: {detail.tokenizer.tokens: 3 } + - length: {detail.tokenfilters.0.tokens: 3 } + - length: {detail.tokenfilters.1.tokens: 1 } + - match: { detail.tokenizer.name: whitespace } + - match: { detail.tokenizer.tokens.0.token: Foo } + - match: { detail.tokenizer.tokens.1.token: Bar } + - match: { detail.tokenizer.tokens.2.token: Buzz } + - match: { detail.tokenfilters.0.name: lowercase } + - match: { detail.tokenfilters.0.tokens.0.token: foo } + - match: { detail.tokenfilters.0.tokens.1.token: bar } + - match: { detail.tokenfilters.0.tokens.2.token: buzz } + - match: { detail.tokenfilters.1.name: "_anonymous_tokenfilter_[1]" } + - match: { detail.tokenfilters.1.tokens.0.token: bar } +--- +"Custom char_filter in request": + - do: + indices.analyze: + body: { "text": "jeff quit phish", "char_filter": [{"type": "mapping", "mappings": ["ph => f", "qu => q"]}], "tokenizer": keyword } + - length: {tokens: 1 } + - match: { tokens.0.token: "jeff qit fish" } + +--- +"Custom tokenizer in request": + - do: + indices.analyze: + body: { "text": "good", "tokenizer": {"type": "nGram", "min_gram": 2, "max_gram": 2}, "explain": true } + - length: {detail.tokenizer.tokens: 3 } + - match: { detail.tokenizer.name: _anonymous_tokenizer } + - match: { detail.tokenizer.tokens.0.token: go } + - match: { detail.tokenizer.tokens.1.token: oo } + - match: { detail.tokenizer.tokens.2.token: od } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yaml index 17b86d64764..3c6ad7a7051 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yaml @@ -30,6 +30,35 @@ - match: { test_index.settings.index.number_of_replicas: "0"} +--- +"Create index with too large wait_for_active_shards": + + - do: + indices.create: + index: test_index + timeout: 100ms + master_timeout: 100ms + wait_for_active_shards: 6 + body: + settings: + number_of_replicas: 5 + + - match: { shards_acknowledged: false } + +--- +"Create index with wait_for_active_shards set to all": + + - do: + indices.create: + index: test_index + wait_for_active_shards: all + body: + settings: + number_of_replicas: "0" + + - match: { acknowledged: true } + - match: { shards_acknowledged: true } + --- "Create index with aliases": diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get/10_basic.yaml 
b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get/10_basic.yaml index 218d1e0433c..54728644f72 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get/10_basic.yaml @@ -33,10 +33,6 @@ setup: test_alias: {} test_blias: {} - - do: - cluster.health: - wait_for_status: yellow - - do: indices.close: index: test_index_3 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/10_basic.yaml index 372b5695d32..6cf0a0b7cf2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/10_basic.yaml @@ -9,9 +9,6 @@ setup: properties: text: type: text - - do: - cluster.health: - wait_for_status: yellow --- "Get field mapping with no index and type": diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yaml index 1a97f69ba89..9b8c3efbce8 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/20_missing_field.yaml @@ -11,9 +11,6 @@ text: type: text analyzer: whitespace - - do: - cluster.health: - wait_for_status: yellow - do: indices.get_field_mapping: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/30_missing_type.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/30_missing_type.yaml index efee9cbc3a3..dfbd9a4af77 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/30_missing_type.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/30_missing_type.yaml @@ -11,9 +11,6 @@ text: type: text analyzer: whitespace - - do: - cluster.health: - wait_for_status: yellow - do: catch: missing diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/50_field_wildcards.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/50_field_wildcards.yaml index 15056d3377a..9d62ab6101f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/50_field_wildcards.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/50_field_wildcards.yaml @@ -40,10 +40,6 @@ setup: i_t3: type: text - - do: - cluster.health: - wait_for_status: yellow - --- "Get field mapping with * for fields": diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/10_basic.yaml index 3a4821193e6..199d4561d09 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/10_basic.yaml @@ -4,6 +4,7 @@ - do: indices.create: index: logs-1 + wait_for_active_shards: 1 body: aliases: logs_index: {} @@ -32,6 +33,7 @@ - do: indices.rollover: alias: "logs_search" + wait_for_active_shards: 1 body: conditions: max_docs: 1 @@ -49,10 +51,6 @@ - is_true: '' - - do: - cluster.health: - wait_for_status: yellow - # index 
into new index - do: index: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yaml index b1aac4952c4..4aeb66812c4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yaml @@ -6,6 +6,7 @@ - do: indices.create: index: source + wait_for_active_shards: 1 body: settings: number_of_replicas: "0" @@ -47,12 +48,14 @@ wait_for_status: green index: source wait_for_relocating_shards: 0 + wait_for_events: "languid" # now we do the actual shrink - do: indices.shrink: index: "source" target: "target" + wait_for_active_shards: 1 body: settings: index.number_of_replicas: 0 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/12_level.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/12_level.yaml index fb71e8d2032..c766f5eb625 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/12_level.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/12_level.yaml @@ -15,10 +15,6 @@ setup: id: 1 body: { "foo": "baz" } - - do: - cluster.health: - wait_for_status: yellow - --- "Level - blank": - do: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.validate_query/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.validate_query/10_basic.yaml index 6fce0392e35..a1f9aa87636 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.validate_query/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.validate_query/10_basic.yaml @@ -6,10 +6,6 @@ setup: settings: number_of_replicas: 0 - - do: - cluster.health: - wait_for_status: yellow - --- "Validate query api": - do: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/11_default_index_type.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/11_default_index_type.yaml index b556cf9ec45..773b7e3bcfe 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/11_default_index_type.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/11_default_index_type.yaml @@ -10,10 +10,6 @@ id: 1 body: { foo: bar } - - do: - cluster.health: - wait_for_status: yellow - - do: mget: index: test_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/12_non_existent_index.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/12_non_existent_index.yaml index f91f3236f96..fd0100b0d63 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/12_non_existent_index.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/12_non_existent_index.yaml @@ -7,10 +7,6 @@ id: 1 body: { foo: bar } - - do: - cluster.health: - wait_for_status: yellow - - do: mget: body: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/13_missing_metadata.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/13_missing_metadata.yaml index 8d986a330bf..d7af1797f7a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/13_missing_metadata.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/13_missing_metadata.yaml @@ -8,10 +8,6 @@ id: 1 body: { foo: bar } - - do: - cluster.health: - wait_for_status: yellow - - do: catch: /action_request_validation_exception.+ id is missing/ mget: diff --git 
a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/15_ids.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/15_ids.yaml index cdd6c5724fe..87c08a3f616 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/15_ids.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/15_ids.yaml @@ -14,10 +14,6 @@ id: 2 body: { foo: baz } - - do: - cluster.health: - wait_for_status: yellow - - do: mget: index: test_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/20_fields.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/20_fields.yaml index 347fc25f67e..f56859ec3cb 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/20_fields.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/20_fields.yaml @@ -8,10 +8,6 @@ id: 1 body: { foo: bar } - - do: - cluster.health: - wait_for_status: yellow - - do: mget: index: test_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/30_parent.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/30_parent.yaml index c9a8b68176f..e1cf8df4fc3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/mget/30_parent.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/mget/30_parent.yaml @@ -10,9 +10,6 @@ _parent: { type: "foo" } settings: number_of_shards: 5 - - do: - cluster.health: - wait_for_status: yellow - do: index: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/11_status.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/11_status.yaml index ef4d53167b6..1451edaf5be 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/11_status.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/msearch/11_status.yaml @@ -3,10 +3,6 @@ setup: - do: indices.create: index: test_1 - - do: - cluster.health: - wait_for_status: yellow - --- "Check Status": - do: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yaml index 84bf44f7392..424153aa573 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yaml @@ -95,7 +95,7 @@ setup: - do: search: body: - fields: [ include.field2 ] + stored_fields: [ include.field2 ] query: { match_all: {} } - is_false: hits.hits.0._source @@ -104,7 +104,7 @@ setup: - do: search: body: - fields: [ include.field2, _source ] + stored_fields: [ include.field2, _source ] query: { match_all: {} } - match: { hits.hits.0._source.include.field2: v2 } - is_true: hits.hits.0._source @@ -113,5 +113,5 @@ setup: "fielddata_fields": - do: search: - fielddata_fields: [ "count" ] + docvalue_fields: [ "count" ] - match: { hits.hits.0.fields.count: [1] } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/30_limits.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/30_limits.yaml index 0c5c1319f10..a93cefff699 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/30_limits.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/30_limits.yaml @@ -10,13 +10,22 @@ setup: indices.refresh: {} --- -"Request window limits": +"Request window limits without scroll": - do: - catch: /Result window is too large, from \+ size must be less than or equal to[:] \[10000\] but was \[10010\]/ + catch: /Result window is too large, from \+ size must be 
less than or equal to[:] \[10000\] but was \[10010\]\. See the scroll api for a more efficient way to request large data sets\./ search: index: test_1 from: 10000 +--- +"Request window limits with scroll": + - do: + catch: /Batch size is too large, size must be less than or equal to[:] \[10000\] but was \[10010\]\. Scroll batch sizes cost as much memory as result windows so they are controlled by the \[index.max_result_window\] index level setting\./ + search: + index: test_1 + scroll: 5m + from: 10000 + --- "Rescore window limits": - do: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/issue4895.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/issue4895.yaml index df7322f12c8..993cbed2647 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/issue4895.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/issue4895.yaml @@ -31,6 +31,6 @@ setup: term: data: some preference: _local - fields: [user,amount] + stored_fields: [user,amount] diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search_shards/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/search_shards/10_basic.yaml index f442ffdb04e..4bfe6e953b2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search_shards/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search_shards/10_basic.yaml @@ -4,10 +4,6 @@ indices.create: index: test_1 - - do: - cluster.health: - wait_for_status: yellow - - do: search_shards: index: test_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.create/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.create/10_basic.yaml index 91080f99930..43db8e5206f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.create/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/snapshot.create/10_basic.yaml @@ -17,10 +17,6 @@ setup: number_of_shards: 1 number_of_replicas: 1 - - do: - cluster.health: - wait_for_status: yellow - --- "Create a snapshot": diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/50_parent.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/50_parent.yaml index 15adf26596c..b25662dbf14 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/50_parent.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/50_parent.yaml @@ -7,10 +7,6 @@ setup: mappings: test: _parent: { type: "foo" } - - do: - cluster.health: - wait_for_status: yellow - --- "Parent": diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/60_refresh.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/60_refresh.yaml index 8c0e7e66c97..c5fe68e26c9 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/60_refresh.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/60_refresh.yaml @@ -8,9 +8,6 @@ settings: index.refresh_interval: -1 number_of_replicas: 0 - - do: - cluster.health: - wait_for_status: yellow - do: update: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/85_fields_meta.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/85_fields_meta.yaml index 0194453a99c..5478d84e2a3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/85_fields_meta.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/85_fields_meta.yaml @@ -13,10 +13,6 @@ test: _parent: { type: "foo" } - - do: - cluster.health: - 
wait_for_status: yellow - - do: update: index: test_1 diff --git a/settings.gradle b/settings.gradle index 6588b605a9d..e070e761433 100644 --- a/settings.gradle +++ b/settings.gradle @@ -7,6 +7,7 @@ List projects = [ 'docs', 'client:rest', 'client:sniffer', + 'client:transport', 'client:test', 'benchmarks', 'distribution:integ-test-zip', @@ -24,6 +25,7 @@ List projects = [ 'modules:lang-groovy', 'modules:lang-mustache', 'modules:lang-painless', + 'modules:transport-netty3', 'modules:reindex', 'modules:percolator', 'plugins:analysis-icu', @@ -36,6 +38,7 @@ List projects = [ 'plugins:discovery-gce', 'plugins:ingest-geoip', 'plugins:ingest-attachment', + 'plugins:ingest-user-agent', 'plugins:lang-javascript', 'plugins:lang-python', 'plugins:mapper-attachments', @@ -55,6 +58,7 @@ List projects = [ 'qa:smoke-test-multinode', 'qa:smoke-test-plugins', 'qa:smoke-test-reindex-with-painless', + 'qa:smoke-test-http', 'qa:vagrant', ] diff --git a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java index 5b79721948e..fe624297e72 100644 --- a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java +++ b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java @@ -150,7 +150,7 @@ public class BootstrapForTesting { return esPolicy.implies(domain, permission) || testFramework.implies(domain, permission); } }); - System.setSecurityManager(new SecureSM(true)); + System.setSecurityManager(SecureSM.createTestSecureSM()); Security.selfTest(); // guarantee plugin classes are initialized first, in case they have one-time hacks. diff --git a/test/framework/src/main/java/org/elasticsearch/bootstrap/ESElasticsearchCliTestCase.java b/test/framework/src/main/java/org/elasticsearch/bootstrap/ESElasticsearchCliTestCase.java index aa327ae2546..2a04a5be97f 100644 --- a/test/framework/src/main/java/org/elasticsearch/bootstrap/ESElasticsearchCliTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/bootstrap/ESElasticsearchCliTestCase.java @@ -22,6 +22,7 @@ package org.elasticsearch.bootstrap; import org.elasticsearch.cli.MockTerminal; import org.elasticsearch.test.ESTestCase; +import java.nio.file.Path; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Consumer; @@ -31,7 +32,7 @@ import static org.hamcrest.CoreMatchers.equalTo; abstract class ESElasticsearchCliTestCase extends ESTestCase { interface InitConsumer { - void accept(final boolean foreground, final String pidFile, final Map esSettings); + void accept(final boolean foreground, final Path pidFile, final Map esSettings); } void runTest( @@ -45,7 +46,7 @@ abstract class ESElasticsearchCliTestCase extends ESTestCase { final AtomicBoolean init = new AtomicBoolean(); final int status = Elasticsearch.main(args, new Elasticsearch() { @Override - void init(final boolean daemonize, final String pidFile, final Map esSettings) { + void init(final boolean daemonize, final Path pidFile, final Map esSettings) { init.set(true); initConsumer.accept(!daemonize, pidFile, esSettings); } @@ -53,12 +54,12 @@ abstract class ESElasticsearchCliTestCase extends ESTestCase { assertThat(status, equalTo(expectedStatus)); assertThat(init.get(), equalTo(expectedInit)); outputConsumer.accept(terminal.getOutput()); - } catch (Throwable t) { + } catch (Exception e) { // if an unexpected exception is thrown, we log // terminal output to aid debugging 
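            // note: Exception rather than Throwable is caught here, so genuine JVM
            // Errors (e.g. OutOfMemoryError) bypass this logging path and stay fatal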
logger.info(terminal.getOutput()); // rethrow so the test fails - throw t; + throw e; } } diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java b/test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java index 128b0d0e315..45edbd8bcb2 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java @@ -32,7 +32,7 @@ import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.monitor.fs.FsInfo; import org.elasticsearch.plugins.Plugin; @@ -67,7 +67,7 @@ public class MockInternalClusterInfoService extends InternalClusterInfoService { usage.getTotalBytes(), usage.getFreeBytes(), usage.getFreeBytes()); paths[0] = path; FsInfo fsInfo = new FsInfo(System.currentTimeMillis(), null, paths); - return new NodeStats(new DiscoveryNode(nodeName, DummyTransportAddress.INSTANCE, emptyMap(), emptySet(), Version.CURRENT), + return new NodeStats(new DiscoveryNode(nodeName, LocalTransportAddress.buildUnique(), emptyMap(), emptySet(), Version.CURRENT), System.currentTimeMillis(), null, null, null, null, null, fsInfo, diff --git a/core/src/test/java/org/elasticsearch/common/bytes/AbstractBytesReferenceTestCase.java b/test/framework/src/main/java/org/elasticsearch/common/bytes/AbstractBytesReferenceTestCase.java similarity index 58% rename from core/src/test/java/org/elasticsearch/common/bytes/AbstractBytesReferenceTestCase.java rename to test/framework/src/main/java/org/elasticsearch/common/bytes/AbstractBytesReferenceTestCase.java index dd9d93dd22a..103e263ffa6 100644 --- a/core/src/test/java/org/elasticsearch/common/bytes/AbstractBytesReferenceTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/common/bytes/AbstractBytesReferenceTestCase.java @@ -29,7 +29,6 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.ByteArray; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.ESTestCase; -import org.hamcrest.Matchers; import java.io.EOFException; import java.io.IOException; @@ -43,11 +42,20 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase { public void testGet() throws IOException { int length = randomIntBetween(1, PAGE_SIZE * 3); BytesReference pbr = newBytesReference(length); + int sliceOffset = randomIntBetween(0, length / 2); int sliceLength = Math.max(1, length - sliceOffset - 1); BytesReference slice = pbr.slice(sliceOffset, sliceLength); assertEquals(pbr.get(sliceOffset), slice.get(0)); assertEquals(pbr.get(sliceOffset + sliceLength - 1), slice.get(sliceLength - 1)); + final int probes = randomIntBetween(20, 100); + BytesReference copy = new BytesArray(pbr.toBytesRef(), true); + for (int i = 0; i < probes; i++) { + int index = randomIntBetween(0, copy.length() - 1); + assertEquals(pbr.get(index), copy.get(index)); + index = randomIntBetween(sliceOffset, sliceOffset + sliceLength - 1); + assertEquals(pbr.get(index), slice.get(index - sliceOffset)); + } } public void testLength() throws IOException { @@ -66,12 +74,9 @@ 
public abstract class AbstractBytesReferenceTestCase extends ESTestCase { int sliceLength = Math.max(0, length - sliceOffset - 1); BytesReference slice = pbr.slice(sliceOffset, sliceLength); assertEquals(sliceLength, slice.length()); - - if (slice.hasArray()) { - assertEquals(sliceOffset, slice.arrayOffset()); - } else { - expectThrows(IllegalStateException.class, () -> - slice.arrayOffset()); + BytesRef singlePageOrNull = getSinglePageOrNull(slice); + if (singlePageOrNull != null) { + assertEquals(sliceOffset, singlePageOrNull.offset); } } @@ -109,7 +114,7 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase { // bulk-read all si.readFully(targetBuf); - assertArrayEquals(pbr.toBytes(), targetBuf); + assertArrayEquals(BytesReference.toBytes(pbr), targetBuf); // continuing to read should now fail with EOFException try { @@ -125,6 +130,26 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase { si.readBytes(targetBuf, 0, length * 2)); } + public void testStreamInputMarkAndReset() throws IOException { + int length = randomIntBetween(10, scaledRandomIntBetween(PAGE_SIZE * 2, PAGE_SIZE * 20)); + BytesReference pbr = newBytesReference(length); + StreamInput si = pbr.streamInput(); + assertNotNull(si); + + StreamInput wrap = StreamInput.wrap(BytesReference.toBytes(pbr)); + while(wrap.available() > 0) { + if (rarely()) { + wrap.mark(Integer.MAX_VALUE); + si.mark(Integer.MAX_VALUE); + } else if (rarely()) { + wrap.reset(); + si.reset(); + } + assertEquals(si.readByte(), wrap.readByte()); + assertEquals(si.available(), wrap.available()); + } + } + public void testStreamInputBulkReadWithOffset() throws IOException { final int length = randomIntBetween(10, scaledRandomIntBetween(PAGE_SIZE * 2, PAGE_SIZE * 20)); BytesReference pbr = newBytesReference(length); @@ -141,7 +166,7 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase { // now do NOT reset the stream - keep the stream's offset! 
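+                    // (offset == length - 1 means the skip plus the single readByte above consumed the whole stream)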
// buffer to compare remaining bytes against bulk read - byte[] pbrBytesWithOffset = Arrays.copyOfRange(pbr.toBytes(), offset, length); + byte[] pbrBytesWithOffset = Arrays.copyOfRange(BytesReference.toBytes(pbr), offset, length); // randomized target buffer to ensure no stale slots byte[] targetBytes = new byte[pbrBytesWithOffset.length]; random().nextBytes(targetBytes); @@ -178,7 +203,7 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase { } assertEquals(pbr.length(), target.length()); BytesRef targetBytes = target.get(); - assertArrayEquals(pbr.toBytes(), Arrays.copyOfRange(targetBytes.bytes, targetBytes.offset, targetBytes.length)); + assertArrayEquals(BytesReference.toBytes(pbr), Arrays.copyOfRange(targetBytes.bytes, targetBytes.offset, targetBytes.length)); } public void testSliceStreamInput() throws IOException { @@ -208,11 +233,11 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase { assertEquals(sliceInput.available(), 0); // compare slice content with upper half of original - byte[] pbrSliceBytes = Arrays.copyOfRange(pbr.toBytes(), sliceOffset, length); + byte[] pbrSliceBytes = Arrays.copyOfRange(BytesReference.toBytes(pbr), sliceOffset, length); assertArrayEquals(pbrSliceBytes, sliceBytes); // compare slice bytes with bytes read from slice via streamInput :D - byte[] sliceToBytes = slice.toBytes(); + byte[] sliceToBytes = BytesReference.toBytes(slice); assertEquals(sliceBytes.length, sliceToBytes.length); assertArrayEquals(sliceBytes, sliceToBytes); @@ -233,10 +258,31 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase { BytesStreamOutput out = new BytesStreamOutput(); pbr.writeTo(out); assertEquals(pbr.length(), out.size()); - assertArrayEquals(pbr.toBytes(), out.bytes().toBytes()); + assertArrayEquals(BytesReference.toBytes(pbr), BytesReference.toBytes(out.bytes())); out.close(); } + public void testInputStreamSkip() throws IOException { + int length = randomIntBetween(10, scaledRandomIntBetween(PAGE_SIZE * 2, PAGE_SIZE * 20)); + BytesReference pbr = newBytesReference(length); + final int iters = randomIntBetween(5, 50); + for (int i = 0; i < iters; i++) { + try (StreamInput input = pbr.streamInput()) { + final int offset = randomIntBetween(0, length-1); + assertEquals(offset, input.skip(offset)); + assertEquals(pbr.get(offset), input.readByte()); + if (offset == length - 1) { + continue; // no more bytes to retrieve! 
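The new testInputStreamSkip pins down the skip contract for streams over a BytesReference: skip reports how many bytes were actually consumed and never advances past the end. Reduced to fixed values, an illustrative sketch of the same assertions:

    BytesReference ref = new BytesArray(new byte[] {0, 1, 2, 3, 4});
    StreamInput in = ref.streamInput();
    assert in.skip(2) == 2;              // fully skipped, still in bounds
    assert in.readByte() == ref.get(2);  // reading resumes right after the skipped bytes
    assert in.skip(Long.MAX_VALUE) == 2; // only indices 3 and 4 were left
    assert in.skip(10) == 0;             // at the end nothing remains to skip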
+ } + final int nextOffset = randomIntBetween(offset, length-2); + assertEquals(nextOffset - offset, input.skip(nextOffset - offset)); + assertEquals(pbr.get(nextOffset+1), input.readByte()); // +1 for the one byte we read above + assertEquals(length - (nextOffset+2), input.skip(Long.MAX_VALUE)); + assertEquals(0, input.skip(randomIntBetween(0, Integer.MAX_VALUE))); + } + } + } + public void testSliceWriteToOutputStream() throws IOException { int length = randomIntBetween(10, PAGE_SIZE * randomIntBetween(2, 5)); BytesReference pbr = newBytesReference(length); @@ -246,7 +292,7 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase { BytesStreamOutput sliceOut = new BytesStreamOutput(sliceLength); slice.writeTo(sliceOut); assertEquals(slice.length(), sliceOut.size()); - assertArrayEquals(slice.toBytes(), sliceOut.bytes().toBytes()); + assertArrayEquals(BytesReference.toBytes(slice), BytesReference.toBytes(sliceOut.bytes())); sliceOut.close(); } @@ -254,16 +300,19 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase { int[] sizes = {0, randomInt(PAGE_SIZE), PAGE_SIZE, randomIntBetween(2, PAGE_SIZE * randomIntBetween(2, 5))}; for (int i = 0; i < sizes.length; i++) { BytesReference pbr = newBytesReference(sizes[i]); - byte[] bytes = pbr.toBytes(); + byte[] bytes = BytesReference.toBytes(pbr); assertEquals(sizes[i], bytes.length); + for (int j = 0; j < bytes.length; j++) { + assertEquals(bytes[j], pbr.get(j)); + } } } - public void testToBytesArraySharedPage() throws IOException { + public void testToBytesRefSharedPage() throws IOException { int length = randomIntBetween(10, PAGE_SIZE); BytesReference pbr = newBytesReference(length); - BytesArray ba = pbr.toBytesArray(); - BytesArray ba2 = pbr.toBytesArray(); + BytesArray ba = new BytesArray(pbr.toBytesRef()); + BytesArray ba2 = new BytesArray(pbr.toBytesRef()); assertNotNull(ba); assertNotNull(ba2); assertEquals(pbr.length(), ba.length()); @@ -272,46 +321,46 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase { assertSame(ba.array(), ba2.array()); } - public void testToBytesArrayMaterializedPages() throws IOException { + public void testToBytesRefMaterializedPages() throws IOException { // we need a length != (n * pagesize) to avoid page sharing at boundaries int length = 0; while ((length % PAGE_SIZE) == 0) { length = randomIntBetween(PAGE_SIZE, PAGE_SIZE * randomIntBetween(2, 5)); } BytesReference pbr = newBytesReference(length); - BytesArray ba = pbr.toBytesArray(); - BytesArray ba2 = pbr.toBytesArray(); + BytesArray ba = new BytesArray(pbr.toBytesRef()); + BytesArray ba2 = new BytesArray(pbr.toBytesRef()); assertNotNull(ba); assertNotNull(ba2); assertEquals(pbr.length(), ba.length()); assertEquals(ba.length(), ba2.length()); } - public void testCopyBytesArray() throws IOException { + public void testCopyBytesRefSharesBytes() throws IOException { // small PBR which would normally share the first page int length = randomIntBetween(10, PAGE_SIZE); BytesReference pbr = newBytesReference(length); - BytesArray ba = pbr.copyBytesArray(); - BytesArray ba2 = pbr.copyBytesArray(); + BytesArray ba = new BytesArray(pbr.toBytesRef(), true); + BytesArray ba2 = new BytesArray(pbr.toBytesRef(), true); assertNotNull(ba); assertNotSame(ba, ba2); assertNotSame(ba.array(), ba2.array()); } - public void testSliceCopyBytesArray() throws IOException { + public void testSliceCopyBytesRef() throws IOException { int length = randomIntBetween(10, PAGE_SIZE * randomIntBetween(2, 8)); BytesReference pbr = 
newBytesReference(length); int sliceOffset = randomIntBetween(0, pbr.length()); - int sliceLength = randomIntBetween(pbr.length() - sliceOffset, pbr.length() - sliceOffset); + int sliceLength = randomIntBetween(0, pbr.length() - sliceOffset); BytesReference slice = pbr.slice(sliceOffset, sliceLength); - BytesArray ba1 = slice.copyBytesArray(); - BytesArray ba2 = slice.copyBytesArray(); + BytesArray ba1 = new BytesArray(slice.toBytesRef(), true); + BytesArray ba2 = new BytesArray(slice.toBytesRef(), true); assertNotNull(ba1); assertNotNull(ba2); assertNotSame(ba1.array(), ba2.array()); - assertArrayEquals(slice.toBytes(), ba1.array()); - assertArrayEquals(slice.toBytes(), ba2.array()); + assertArrayEquals(BytesReference.toBytes(slice), ba1.array()); + assertArrayEquals(BytesReference.toBytes(slice), ba2.array()); assertArrayEquals(ba1.array(), ba2.array()); } @@ -329,14 +378,14 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase { while((ref = iterator.next()) != null) { builder.append(ref); } - assertArrayEquals(pbr.toBytes(), BytesRef.deepCopyOf(builder.toBytesRef()).bytes); + assertArrayEquals(BytesReference.toBytes(pbr), BytesRef.deepCopyOf(builder.toBytesRef()).bytes); } public void testSliceIterator() throws IOException { int length = randomIntBetween(10, PAGE_SIZE * randomIntBetween(2, 8)); BytesReference pbr = newBytesReference(length); int sliceOffset = randomIntBetween(0, pbr.length()); - int sliceLength = randomIntBetween(pbr.length() - sliceOffset, pbr.length() - sliceOffset); + int sliceLength = randomIntBetween(0, pbr.length() - sliceOffset); BytesReference slice = pbr.slice(sliceOffset, sliceLength); BytesRefIterator iterator = slice.iterator(); BytesRef ref = null; @@ -344,7 +393,7 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase { while((ref = iterator.next()) != null) { builder.append(ref); } - assertArrayEquals(slice.toBytes(), BytesRef.deepCopyOf(builder.toBytesRef()).bytes); + assertArrayEquals(BytesReference.toBytes(slice), BytesRef.deepCopyOf(builder.toBytesRef()).bytes); } public void testIteratorRandom() throws IOException { @@ -352,12 +401,12 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase { BytesReference pbr = newBytesReference(length); if (randomBoolean()) { int sliceOffset = randomIntBetween(0, pbr.length()); - int sliceLength = randomIntBetween(pbr.length() - sliceOffset, pbr.length() - sliceOffset); + int sliceLength = randomIntBetween(0, pbr.length() - sliceOffset); pbr = pbr.slice(sliceOffset, sliceLength); } if (randomBoolean()) { - pbr = pbr.toBytesArray(); + pbr = new BytesArray(pbr.toBytesRef()); } BytesRefIterator iterator = pbr.iterator(); BytesRef ref = null; @@ -365,50 +414,40 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase { while((ref = iterator.next()) != null) { builder.append(ref); } - assertArrayEquals(pbr.toBytes(), BytesRef.deepCopyOf(builder.toBytesRef()).bytes); - } - - public void testArray() throws IOException { - int[] sizes = {0, randomInt(PAGE_SIZE), PAGE_SIZE, randomIntBetween(2, PAGE_SIZE * randomIntBetween(2, 5))}; - - for (int i = 0; i < sizes.length; i++) { - BytesReference pbr = newBytesReference(sizes[i]); - byte[] array = pbr.array(); - assertNotNull(array); - assertEquals(sizes[i], array.length); - assertSame(array, pbr.array()); - } + assertArrayEquals(BytesReference.toBytes(pbr), BytesRef.deepCopyOf(builder.toBytesRef()).bytes); } public void testArrayOffset() throws IOException { int length = randomInt(PAGE_SIZE * 
randomIntBetween(2, 5)); BytesReference pbr = newBytesReference(length); - if (pbr.hasArray()) { - assertEquals(0, pbr.arrayOffset()); - } else { - expectThrows(IllegalStateException.class, () -> - pbr.arrayOffset()); + BytesRef singlePageOrNull = getSinglePageOrNull(pbr); + if (singlePageOrNull != null) { + assertEquals(0, singlePageOrNull.offset); } } public void testSliceArrayOffset() throws IOException { int length = randomInt(PAGE_SIZE * randomIntBetween(2, 5)); BytesReference pbr = newBytesReference(length); - int sliceOffset = randomIntBetween(0, pbr.length()); - int sliceLength = randomIntBetween(pbr.length() - sliceOffset, pbr.length() - sliceOffset); + int sliceOffset = randomIntBetween(0, pbr.length() - 1); // an offset to the end would be len 0 + int sliceLength = randomIntBetween(0, pbr.length() - sliceOffset); BytesReference slice = pbr.slice(sliceOffset, sliceLength); - if (slice.hasArray()) { - assertEquals(sliceOffset, slice.arrayOffset()); - } else { - expectThrows(IllegalStateException.class, () -> - slice.arrayOffset()); + BytesRef singlePageOrNull = getSinglePageOrNull(slice); + if (singlePageOrNull != null) { + if (getSinglePageOrNull(pbr) == null) { + // original reference has pages + assertEquals(sliceOffset % PAGE_SIZE, singlePageOrNull.offset); + } else { + // orig ref has no pages ie. BytesArray + assertEquals(sliceOffset, singlePageOrNull.offset); + } } } public void testToUtf8() throws IOException { // test empty BytesReference pbr = newBytesReference(0); - assertEquals("", pbr.toUtf8()); + assertEquals("", pbr.utf8ToString()); // TODO: good way to test? } @@ -417,7 +456,6 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase { BytesReference pbr = newBytesReference(length); BytesRef ref = pbr.toBytesRef(); assertNotNull(ref); - assertEquals(pbr.arrayOffset(), ref.offset); assertEquals(pbr.length(), ref.length); } @@ -426,19 +464,18 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase { BytesReference pbr = newBytesReference(length); // get a BytesRef from a slice int sliceOffset = randomIntBetween(0, pbr.length()); - int sliceLength = randomIntBetween(pbr.length() - sliceOffset, pbr.length() - sliceOffset); - BytesRef sliceRef = pbr.slice(sliceOffset, sliceLength).toBytesRef(); - // note that these are only true if we have <= than a page, otherwise offset/length are shifted - assertEquals(sliceOffset, sliceRef.offset); - assertEquals(sliceLength, sliceRef.length); - } + int sliceLength = randomIntBetween(0, pbr.length() - sliceOffset); - public void testCopyBytesRef() throws IOException { - int length = randomIntBetween(0, PAGE_SIZE * randomIntBetween(2, 5)); - BytesReference pbr = newBytesReference(length); - BytesRef ref = pbr.copyBytesRef(); - assertNotNull(ref); - assertEquals(pbr.length(), ref.length); + BytesRef sliceRef = pbr.slice(sliceOffset, sliceLength).toBytesRef(); + + if (sliceLength == 0 && sliceOffset != sliceRef.offset) { + // some impls optimize this to an empty instance then the offset will be 0 + assertEquals(0, sliceRef.offset); + } else { + // note that these are only true if we have <= than a page, otherwise offset/length are shifted + assertEquals(sliceOffset, sliceRef.offset); + } + assertEquals(sliceLength, sliceRef.length); } public void testHashCode() throws IOException { @@ -448,40 +485,36 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase { // test with content pbr = newBytesReference(randomIntBetween(0, PAGE_SIZE * randomIntBetween(2, 5))); - int jdkHash = 
Arrays.hashCode(pbr.toBytes()); + int jdkHash = Arrays.hashCode(BytesReference.toBytes(pbr)); int pbrHash = pbr.hashCode(); assertEquals(jdkHash, pbrHash); // test hashes of slices int sliceFrom = randomIntBetween(0, pbr.length()); - int sliceLength = randomIntBetween(pbr.length() - sliceFrom, pbr.length() - sliceFrom); + int sliceLength = randomIntBetween(0, pbr.length() - sliceFrom); BytesReference slice = pbr.slice(sliceFrom, sliceLength); - int sliceJdkHash = Arrays.hashCode(slice.toBytes()); + int sliceJdkHash = Arrays.hashCode(BytesReference.toBytes(slice)); int sliceHash = slice.hashCode(); assertEquals(sliceJdkHash, sliceHash); } - public void testEquals() { - int length = randomIntBetween(100, PAGE_SIZE * randomIntBetween(2, 5)); - ByteArray ba1 = bigarrays.newByteArray(length, false); - ByteArray ba2 = bigarrays.newByteArray(length, false); - - // copy contents - for (long i = 0; i < length; i++) { - ba2.set(i, ba1.get(i)); - } + public void testEquals() throws IOException { + BytesReference bytesReference = newBytesReference(randomIntBetween(100, PAGE_SIZE * randomIntBetween(2, 5))); + BytesReference copy = bytesReference.slice(0, bytesReference.length()); // get refs & compare - BytesReference pbr = new PagedBytesReference(bigarrays, ba1, length); - BytesReference pbr2 = new PagedBytesReference(bigarrays, ba2, length); - assertEquals(pbr, pbr2); - } + assertEquals(copy, bytesReference); + int sliceFrom = randomIntBetween(0, bytesReference.length()); + int sliceLength = randomIntBetween(0, bytesReference.length() - sliceFrom); + assertEquals(copy.slice(sliceFrom, sliceLength), bytesReference.slice(sliceFrom, sliceLength)); - public void testEqualsPeerClass() throws IOException { - int length = randomIntBetween(100, PAGE_SIZE * randomIntBetween(2, 5)); - BytesReference pbr = newBytesReference(length); - BytesReference ba = new BytesArray(pbr.toBytes()); - assertEquals(pbr, ba); + BytesRef bytesRef = BytesRef.deepCopyOf(copy.toBytesRef()); + assertEquals(new BytesArray(bytesRef), copy); + + int offsetToFlip = randomIntBetween(0, bytesRef.length - 1); + int value = ~Byte.toUnsignedInt(bytesRef.bytes[bytesRef.offset+offsetToFlip]); + bytesRef.bytes[bytesRef.offset+offsetToFlip] = (byte)value; + assertNotEquals(new BytesArray(bytesRef), copy); } public void testSliceEquals() { @@ -491,19 +524,118 @@ public abstract class AbstractBytesReferenceTestCase extends ESTestCase { // test equality of slices int sliceFrom = randomIntBetween(0, pbr.length()); - int sliceLength = randomIntBetween(pbr.length() - sliceFrom, pbr.length() - sliceFrom); + int sliceLength = randomIntBetween(0, pbr.length() - sliceFrom); BytesReference slice1 = pbr.slice(sliceFrom, sliceLength); BytesReference slice2 = pbr.slice(sliceFrom, sliceLength); - assertArrayEquals(slice1.toBytes(), slice2.toBytes()); + assertArrayEquals(BytesReference.toBytes(slice1), BytesReference.toBytes(slice2)); // test a slice with same offset but different length, // unless randomized testing gave us a 0-length slice. 
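The inequality check in the rewritten testEquals rests on a small invariant: the bitwise complement of a byte differs from it in every bit, so flipping a single position is guaranteed to break equality regardless of the randomized content. In isolation:

    byte b = randomByte();                        // any of the 256 values works
    byte flipped = (byte) ~Byte.toUnsignedInt(b); // complement, differs from b in every bit
    assert flipped != b;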
if (sliceLength > 0) { BytesReference slice3 = pbr.slice(sliceFrom, sliceLength / 2); - assertFalse(Arrays.equals(slice1.toBytes(), slice3.toBytes())); + assertFalse(Arrays.equals(BytesReference.toBytes(slice1), BytesReference.toBytes(slice3))); } } protected abstract BytesReference newBytesReference(int length) throws IOException; + public void testCompareTo() throws IOException { + final int iters = randomIntBetween(5, 10); + for (int i = 0; i < iters; i++) { + int length = randomIntBetween(10, PAGE_SIZE * randomIntBetween(2, 8)); + BytesReference bytesReference = newBytesReference(length); + assertTrue(bytesReference.compareTo(new BytesArray("")) > 0); + assertTrue(new BytesArray("").compareTo(bytesReference) < 0); + + + assertEquals(0, bytesReference.compareTo(bytesReference)); + int sliceFrom = randomIntBetween(0, bytesReference.length()); + int sliceLength = randomIntBetween(0, bytesReference.length() - sliceFrom); + BytesReference slice = bytesReference.slice(sliceFrom, sliceLength); + + assertEquals(bytesReference.toBytesRef().compareTo(slice.toBytesRef()), + new BytesArray(bytesReference.toBytesRef(), true).compareTo(new BytesArray(slice.toBytesRef(), true))); + + assertEquals(bytesReference.toBytesRef().compareTo(slice.toBytesRef()), + bytesReference.compareTo(slice)); + assertEquals(slice.toBytesRef().compareTo(bytesReference.toBytesRef()), + slice.compareTo(bytesReference)); + + assertEquals(0, slice.compareTo(new BytesArray(slice.toBytesRef()))); + assertEquals(0, new BytesArray(slice.toBytesRef()).compareTo(slice)); + + final int crazyLength = length + randomIntBetween(10, PAGE_SIZE * randomIntBetween(2, 8)); + ReleasableBytesStreamOutput crazyStream = new ReleasableBytesStreamOutput(length, bigarrays); + final int offset = randomIntBetween(0, crazyLength - length); + for (int j = 0; j < offset; j++) { + crazyStream.writeByte((byte) random().nextInt(1 << 8)); + } + bytesReference.writeTo(crazyStream); + for (int j = crazyStream.size(); j < crazyLength; j++) { + crazyStream.writeByte((byte) random().nextInt(1 << 8)); + } + PagedBytesReference crazyReference = crazyStream.bytes(); + + assertFalse(crazyReference.compareTo(bytesReference) == 0); + assertEquals(0, crazyReference.slice(offset, length).compareTo( + bytesReference)); + assertEquals(0, bytesReference.compareTo( + crazyReference.slice(offset, length))); + } + } + + public static BytesRef getSinglePageOrNull(BytesReference ref) throws IOException { + if (ref.length() > 0) { + BytesRefIterator iterator = ref.iterator(); + BytesRef next = iterator.next(); + BytesRef retVal = next.clone(); + if (iterator.next() == null) { + return retVal; + } + } else { + return new BytesRef(); + } + return null; + } + + public static int getNumPages(BytesReference ref) throws IOException { + int num = 0; + if (ref.length() > 0) { + BytesRefIterator iterator = ref.iterator(); + while(iterator.next() != null) { + num++; + } + } + return num; + } + + + public void testBasicEquals() { + final int len = randomIntBetween(0, randomBoolean() ? 
10: 100000); + final int offset1 = randomInt(5); + final byte[] array1 = new byte[offset1 + len + randomInt(5)]; + random().nextBytes(array1); + final int offset2 = randomInt(offset1); + final byte[] array2 = Arrays.copyOfRange(array1, offset1 - offset2, array1.length); + + final BytesArray b1 = new BytesArray(array1, offset1, len); + final BytesArray b2 = new BytesArray(array2, offset2, len); + assertEquals(b1, b2); + assertEquals(Arrays.hashCode(BytesReference.toBytes(b1)), b1.hashCode()); + assertEquals(Arrays.hashCode(BytesReference.toBytes(b2)), b2.hashCode()); + + // test same instance + assertEquals(b1, b1); + assertEquals(b2, b2); + + if (len > 0) { + // test different length + BytesArray differentLen = new BytesArray(array1, offset1, randomInt(len - 1)); + assertNotEquals(b1, differentLen); + + // test changed bytes + array1[offset1 + randomInt(len - 1)] += 13; + assertNotEquals(b1, b2); + } + } } diff --git a/test/framework/src/main/java/org/elasticsearch/common/bytes/ByteBufferBytesReference.java b/test/framework/src/main/java/org/elasticsearch/common/bytes/ByteBufferBytesReference.java deleted file mode 100644 index a1c9da18470..00000000000 --- a/test/framework/src/main/java/org/elasticsearch/common/bytes/ByteBufferBytesReference.java +++ /dev/null @@ -1,167 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.bytes; - -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.io.stream.ByteBufferStreamInput; -import org.elasticsearch.common.io.stream.StreamInput; -import org.jboss.netty.util.CharsetUtil; - -import java.io.IOException; -import java.io.OutputStream; -import java.nio.ByteBuffer; -import java.nio.CharBuffer; -import java.nio.charset.CharacterCodingException; -import java.nio.charset.CharsetDecoder; -import java.nio.charset.CoderResult; -import java.nio.charset.StandardCharsets; - -/** - * Note: this is only used by one lone test method. 
- */ -public class ByteBufferBytesReference implements BytesReference { - - private final ByteBuffer buffer; - - public ByteBufferBytesReference(ByteBuffer buffer) { - this.buffer = buffer; - } - - @Override - public byte get(int index) { - return buffer.get(buffer.position() + index); - } - - @Override - public int length() { - return buffer.remaining(); - } - - @Override - public BytesReference slice(int from, int length) { - ByteBuffer dup = buffer.duplicate(); - dup.position(buffer.position() + from); - dup.limit(buffer.position() + from + length); - return new ByteBufferBytesReference(dup); - } - - @Override - public StreamInput streamInput() { - return new ByteBufferStreamInput(buffer); - } - - @Override - public void writeTo(OutputStream os) throws IOException { - if (buffer.hasArray()) { - os.write(buffer.array(), buffer.arrayOffset() + buffer.position(), buffer.remaining()); - } else { - byte[] tmp = new byte[8192]; - ByteBuffer buf = buffer.duplicate(); - while (buf.hasRemaining()) { - buf.get(tmp, 0, Math.min(tmp.length, buf.remaining())); - os.write(tmp); - } - } - } - - @Override - public byte[] toBytes() { - if (!buffer.hasRemaining()) { - return BytesRef.EMPTY_BYTES; - } - byte[] tmp = new byte[buffer.remaining()]; - buffer.duplicate().get(tmp); - return tmp; - } - - @Override - public BytesArray toBytesArray() { - if (buffer.hasArray()) { - return new BytesArray(buffer.array(), buffer.arrayOffset() + buffer.position(), buffer.remaining()); - } - return new BytesArray(toBytes()); - } - - @Override - public BytesArray copyBytesArray() { - return new BytesArray(toBytes()); - } - - @Override - public boolean hasArray() { - return buffer.hasArray(); - } - - @Override - public byte[] array() { - return buffer.array(); - } - - @Override - public int arrayOffset() { - return buffer.arrayOffset() + buffer.position(); - } - - @Override - public int hashCode() { - return Helper.bytesHashCode(this); - } - - @Override - public boolean equals(Object obj) { - return Helper.bytesEqual(this, (BytesReference) obj); - } - - @Override - public String toUtf8() { - if (!buffer.hasRemaining()) { - return ""; - } - final CharsetDecoder decoder = CharsetUtil.getDecoder(StandardCharsets.UTF_8); - final CharBuffer dst = CharBuffer.allocate( - (int) ((double) buffer.remaining() * decoder.maxCharsPerByte())); - try { - CoderResult cr = decoder.decode(buffer, dst, true); - if (!cr.isUnderflow()) { - cr.throwException(); - } - cr = decoder.flush(dst); - if (!cr.isUnderflow()) { - cr.throwException(); - } - } catch (CharacterCodingException x) { - throw new IllegalStateException(x); - } - return dst.flip().toString(); - } - - @Override - public BytesRef toBytesRef() { - if (buffer.hasArray()) { - return new BytesRef(buffer.array(), buffer.arrayOffset() + buffer.position(), buffer.remaining()); - } - return new BytesRef(toBytes()); - } - - @Override - public BytesRef copyBytesRef() { - return new BytesRef(toBytes()); - } -} diff --git a/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java b/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java index 613445c2271..22a1e2660b6 100644 --- a/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java +++ b/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java @@ -246,7 +246,7 @@ public class MockBigArrays extends BigArrays { return arr; } - private static abstract class AbstractArrayWrapper { + private abstract static class AbstractArrayWrapper { final BigArray in; 
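The private static abstract to private abstract static reordering here (like the final static to static final changes further down) is purely stylistic: both orders are semantically identical, but the JLS recommends the access modifier first, then abstract, then static, and style checkers such as checkstyle's ModifierOrder rule flag the other order, which is presumably what motivated the sweep. For example:

    private abstract static class Compliant {}  // JLS-recommended modifier order
    // private static abstract class Flagged {} // same semantics, but rejected by ModifierOrder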
boolean clearOnResize; diff --git a/test/framework/src/main/java/org/elasticsearch/index/MockEngineFactoryPlugin.java b/test/framework/src/main/java/org/elasticsearch/index/MockEngineFactoryPlugin.java index ccb2c3d69fe..7ddd2526fcd 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/MockEngineFactoryPlugin.java +++ b/test/framework/src/main/java/org/elasticsearch/index/MockEngineFactoryPlugin.java @@ -49,7 +49,7 @@ public class MockEngineFactoryPlugin extends Plugin { } @Override - public Collection<Module> nodeModules() { + public Collection<Module> createGuiceModules() { return Collections.singleton(new MockEngineReaderModule()); } diff --git a/test/framework/src/main/java/org/elasticsearch/ingest/IngestTestPlugin.java b/test/framework/src/main/java/org/elasticsearch/ingest/IngestTestPlugin.java index b32a2eab991..dd38a0707b4 100644 --- a/test/framework/src/main/java/org/elasticsearch/ingest/IngestTestPlugin.java +++ b/test/framework/src/main/java/org/elasticsearch/ingest/IngestTestPlugin.java @@ -19,22 +19,24 @@ package org.elasticsearch.ingest; -import org.elasticsearch.node.NodeModule; +import java.util.Collections; +import java.util.Map; + +import org.elasticsearch.plugins.IngestPlugin; import org.elasticsearch.plugins.Plugin; /** * Adds an ingest processor to be used in tests. */ -public class IngestTestPlugin extends Plugin { - - public void onModule(NodeModule nodeModule) { - nodeModule.registerProcessor("test", (registry) -> config -> - new TestProcessor("id", "test", doc -> { - doc.setFieldValue("processed", true); - if (doc.hasField("fail") && doc.getFieldValue("fail", Boolean.class)) { - throw new IllegalArgumentException("test processor failed"); - } - }) - ); +public class IngestTestPlugin extends Plugin implements IngestPlugin { + @Override + public Map<String, Processor.Factory> getProcessors(Processor.Parameters parameters) { + return Collections.singletonMap("test", (factories, tag, config) -> + new TestProcessor("id", "test", doc -> { + doc.setFieldValue("processed", true); + if (doc.hasField("fail") && doc.getFieldValue("fail", Boolean.class)) { + throw new IllegalArgumentException("test processor failed"); + } + })); } } diff --git a/test/framework/src/main/java/org/elasticsearch/ingest/TestProcessor.java b/test/framework/src/main/java/org/elasticsearch/ingest/TestProcessor.java index e36d73a8d9f..4e4c5a24c0c 100644 --- a/test/framework/src/main/java/org/elasticsearch/ingest/TestProcessor.java +++ b/test/framework/src/main/java/org/elasticsearch/ingest/TestProcessor.java @@ -64,9 +64,10 @@ public class TestProcessor implements Processor { return invokedCounter.get(); } - public static final class Factory extends AbstractProcessorFactory<TestProcessor> { + public static final class Factory implements Processor.Factory { @Override - public TestProcessor doCreate(String processorTag, Map<String, Object> config) throws Exception { + public TestProcessor create(Map<String, Processor.Factory> registry, String processorTag, + Map<String, Object> config) throws Exception { return new TestProcessor(processorTag, "test-processor", ingestDocument -> {}); } } diff --git a/test/framework/src/main/java/org/elasticsearch/ingest/TestTemplateService.java b/test/framework/src/main/java/org/elasticsearch/ingest/TestTemplateService.java index d44764fa8ac..190674ff61b 100644 --- a/test/framework/src/main/java/org/elasticsearch/ingest/TestTemplateService.java +++ b/test/framework/src/main/java/org/elasticsearch/ingest/TestTemplateService.java @@ -22,17 +22,27 @@ package org.elasticsearch.ingest; import java.util.Map; public class TestTemplateService implements TemplateService { + private
boolean compilationException; public static TemplateService instance() { - return new TestTemplateService(); + return new TestTemplateService(false); } - private TestTemplateService() { + public static TemplateService instance(boolean compilationException) { + return new TestTemplateService(compilationException); + } + + private TestTemplateService(boolean compilationException) { + this.compilationException = compilationException; } @Override public Template compile(String template) { - return new MockTemplate(template); + if (this.compilationException) { + throw new RuntimeException("could not compile script"); + } else { + return new MockTemplate(template); + } } public static class MockTemplate implements TemplateService.Template { diff --git a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java index 61a0cf6c236..519e52074da 100644 --- a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java +++ b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java @@ -22,92 +22,60 @@ package org.elasticsearch.script; import org.apache.lucene.index.LeafReaderContext; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.plugins.ScriptPlugin; +import org.elasticsearch.search.lookup.LeafSearchLookup; import org.elasticsearch.search.lookup.SearchLookup; import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; import java.util.Map; +import java.util.function.Function; /** - * A dummy script engine used for testing. Scripts must be a number. Many - * tests rely on the fact this thing returns a String as its compiled form. - * they even try to serialize it over the network! + * A mocked script engine that can be used for testing purposes.
*/ public class MockScriptEngine implements ScriptEngineService { public static final String NAME = "mockscript"; - /** A compiled script, just holds the scripts name, source, and params that were passed in */ - public static class MockCompiledScript { - public final String name; - public final String source; - public final Map params; + private final String type; + private final Map, Object>> scripts; - MockCompiledScript(String name, String source, Map params) { - this.name = name; - this.source = source; - this.params = params; - } + public MockScriptEngine(String type, Map, Object>> scripts) { + this.type = type; + this.scripts = Collections.unmodifiableMap(scripts); } - public static class TestPlugin extends Plugin implements ScriptPlugin { - @Override - public ScriptEngineService getScriptEngineService(Settings settings) { - return new MockScriptEngine(); - } + public MockScriptEngine() { + this(NAME, Collections.emptyMap()); } @Override public String getType() { - return NAME; + return type; } @Override public String getExtension() { - return NAME; + return getType(); } @Override - public Object compile(String scriptName, String scriptSource, Map params) { - return new MockCompiledScript(scriptName, scriptSource, params); + public Object compile(String name, String source, Map params) { + Function, Object> script = scripts.get(source); + return new MockCompiledScript(name, params, source, script); } @Override public ExecutableScript executable(CompiledScript compiledScript, @Nullable Map vars) { - assert compiledScript.compiled() instanceof MockCompiledScript - : "do NOT pass compiled scripts from other engines to me, I will fail your test, got: " + compiledScript; - return new AbstractExecutableScript() { - @Override - public Object run() { - return new BytesArray(((MockCompiledScript)compiledScript.compiled()).source); - } - }; + MockCompiledScript compiled = (MockCompiledScript) compiledScript.compiled(); + return compiled.createExecutableScript(vars); } @Override public SearchScript search(CompiledScript compiledScript, SearchLookup lookup, @Nullable Map vars) { - return new SearchScript() { - @Override - public LeafSearchScript getLeafSearchScript(LeafReaderContext context) throws IOException { - AbstractSearchScript leafSearchScript = new AbstractSearchScript() { - - @Override - public Object run() { - return ((MockCompiledScript)compiledScript.compiled()).source; - } - - }; - leafSearchScript.setLookup(lookup.getLeafSearchLookup(context)); - return leafSearchScript; - } - - @Override - public boolean needsScores() { - return false; - } - }; + MockCompiledScript compiled = (MockCompiledScript) compiledScript.compiled(); + return compiled.createSearchScript(vars, lookup); } @Override @@ -118,4 +86,111 @@ public class MockScriptEngine implements ScriptEngineService { public boolean isInlineScriptEnabled() { return true; } + + + public class MockCompiledScript { + + private final String name; + private final String source; + private final Map params; + private final Function, Object> script; + + public MockCompiledScript(String name, Map params, String source, Function, Object> script) { + this.name = name; + this.source = source; + this.params = params; + this.script = script; + } + + public String getName() { + return name; + } + + public ExecutableScript createExecutableScript(Map vars) { + Map context = new HashMap<>(); + if (params != null) { + context.putAll(params); + } + if (vars != null) { + context.putAll(vars); + } + return new MockExecutableScript(context, script != 
null ? script : ctx -> new BytesArray(source)); + } + + public SearchScript createSearchScript(Map vars, SearchLookup lookup) { + Map context = new HashMap<>(); + if (params != null) { + context.putAll(params); + } + if (vars != null) { + context.putAll(vars); + } + return new MockSearchScript(lookup, context, script != null ? script : ctx -> source); + } + } + + public class MockExecutableScript implements ExecutableScript { + + private final Function, Object> script; + private final Map vars; + + public MockExecutableScript(Map vars, Function, Object> script) { + this.vars = vars; + this.script = script; + } + + @Override + public void setNextVar(String name, Object value) { + vars.put(name, value); + } + + @Override + public Object run() { + return script.apply(vars); + } + } + + public class MockSearchScript implements SearchScript { + + private final Function, Object> script; + private final Map vars; + private final SearchLookup lookup; + + public MockSearchScript(SearchLookup lookup, Map vars, Function, Object> script) { + this.lookup = lookup; + this.vars = vars; + this.script = script; + } + + @Override + public LeafSearchScript getLeafSearchScript(LeafReaderContext context) throws IOException { + LeafSearchLookup leafLookup = lookup.getLeafSearchLookup(context); + + Map ctx = new HashMap<>(); + ctx.putAll(leafLookup.asMap()); + if (vars != null) { + ctx.putAll(vars); + } + + AbstractSearchScript leafSearchScript = new AbstractSearchScript() { + + @Override + public Object run() { + return script.apply(ctx); + } + + @Override + public void setNextVar(String name, Object value) { + ctx.put(name, value); + } + }; + leafSearchScript.setLookup(leafLookup); + return leafSearchScript; + } + + @Override + public boolean needsScores() { + return false; + } + } } diff --git a/core/src/main/java/org/elasticsearch/repositories/uri/URLIndexShardRepository.java b/test/framework/src/main/java/org/elasticsearch/script/MockScriptPlugin.java similarity index 54% rename from core/src/main/java/org/elasticsearch/repositories/uri/URLIndexShardRepository.java rename to test/framework/src/main/java/org/elasticsearch/script/MockScriptPlugin.java index 616a36d5066..dc397bd0b14 100644 --- a/core/src/main/java/org/elasticsearch/repositories/uri/URLIndexShardRepository.java +++ b/test/framework/src/main/java/org/elasticsearch/script/MockScriptPlugin.java @@ -17,27 +17,30 @@ * under the License. */ -package org.elasticsearch.repositories.uri; +package org.elasticsearch.script; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardRepository; -import org.elasticsearch.indices.IndicesService; -import org.elasticsearch.repositories.RepositoryName; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.ScriptPlugin; + +import java.util.Map; +import java.util.function.Function; /** + * A script plugin that uses {@link MockScriptEngine} as the script engine for tests. 
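Taken together, the rewritten engine and the plugin below let a test register script behaviors as plain functions keyed by script source: compile looks the source up in the supplied map, and the compiled script falls back to echoing its source when nothing is registered. A hypothetical subclass, with the class name and script body invented for illustration:

    public class MyScriptsPlugin extends MockScriptPlugin {
        @Override
        protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
            // keyed by script source; the function receives the merged params/vars context
            return Collections.singletonMap("doc_value + 1",
                    ctx -> ((Number) ctx.get("doc_value")).intValue() + 1);
        }
    }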
*/ -public class URLIndexShardRepository extends BlobStoreIndexShardRepository { - - @Inject - public URLIndexShardRepository(Settings settings, RepositoryName repositoryName, IndicesService indicesService, ClusterService clusterService) { - super(settings, repositoryName, indicesService, clusterService); - } +public abstract class MockScriptPlugin extends Plugin implements ScriptPlugin { + public static final String NAME = MockScriptEngine.NAME; @Override - public void verify(String seed) { - //TODO: Add verification that URL is accessible + public ScriptEngineService getScriptEngineService(Settings settings) { + return new MockScriptEngine(pluginScriptLang(), pluginScripts()); + } + + protected abstract Map, Object>> pluginScripts(); + + public String pluginScriptLang() { + return NAME; } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java index c3693905501..d864dc732c0 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java @@ -19,7 +19,6 @@ package org.elasticsearch.test; -import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator; import com.fasterxml.jackson.core.JsonParseException; import com.fasterxml.jackson.core.io.JsonStringEncoder; @@ -95,7 +94,7 @@ import org.elasticsearch.node.internal.InternalSettingsPreparer; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsService; -import org.elasticsearch.script.Script.ScriptParseException; +import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchModule; @@ -128,7 +127,6 @@ import static org.hamcrest.Matchers.not; public abstract class AbstractQueryTestCase> extends ESTestCase { - private static final GeohashGenerator geohashGenerator = new GeohashGenerator(); public static final String STRING_FIELD_NAME = "mapped_string"; protected static final String STRING_FIELD_NAME_2 = "mapped_string_2"; protected static final String INT_FIELD_NAME = "mapped_int"; @@ -305,7 +303,7 @@ public abstract class AbstractQueryTestCase> try { parseQuery(testQuery); fail("some parsing exception expected for query: " + testQuery); - } catch (ParsingException | ScriptParseException | ElasticsearchParseException e) { + } catch (ParsingException | ElasticsearchParseException e) { // different kinds of exception wordings depending on location // of mutation, so no simple asserts possible here } catch (JsonParseException e) { @@ -326,11 +324,11 @@ public abstract class AbstractQueryTestCase> /** * Parses the query provided as string argument and compares it with the expected result provided as argument as a {@link QueryBuilder} */ - protected final static void assertParsedQuery(String queryAsString, QueryBuilder expectedQuery) throws IOException { + protected static final void assertParsedQuery(String queryAsString, QueryBuilder expectedQuery) throws IOException { assertParsedQuery(queryAsString, expectedQuery, ParseFieldMatcher.STRICT); } - protected final static void assertParsedQuery(String queryAsString, QueryBuilder expectedQuery, ParseFieldMatcher matcher) throws IOException { + protected static final void assertParsedQuery(String queryAsString, QueryBuilder expectedQuery, ParseFieldMatcher matcher) throws 
IOException { QueryBuilder newQuery = parseQuery(queryAsString, matcher); assertNotSame(newQuery, expectedQuery); assertEquals(expectedQuery, newQuery); @@ -340,31 +338,31 @@ public abstract class AbstractQueryTestCase> /** * Parses the query provided as bytes argument and compares it with the expected result provided as argument as a {@link QueryBuilder} */ - protected final static void assertParsedQuery(BytesReference queryAsBytes, QueryBuilder expectedQuery) throws IOException { + protected static final void assertParsedQuery(BytesReference queryAsBytes, QueryBuilder expectedQuery) throws IOException { assertParsedQuery(queryAsBytes, expectedQuery, ParseFieldMatcher.STRICT); } - protected final static void assertParsedQuery(BytesReference queryAsBytes, QueryBuilder expectedQuery, ParseFieldMatcher matcher) throws IOException { + protected static final void assertParsedQuery(BytesReference queryAsBytes, QueryBuilder expectedQuery, ParseFieldMatcher matcher) throws IOException { QueryBuilder newQuery = parseQuery(queryAsBytes, matcher); assertNotSame(newQuery, expectedQuery); assertEquals(expectedQuery, newQuery); assertEquals(expectedQuery.hashCode(), newQuery.hashCode()); } - protected final static QueryBuilder parseQuery(String queryAsString) throws IOException { + protected static final QueryBuilder parseQuery(String queryAsString) throws IOException { return parseQuery(queryAsString, ParseFieldMatcher.STRICT); } - protected final static QueryBuilder parseQuery(String queryAsString, ParseFieldMatcher matcher) throws IOException { + protected static final QueryBuilder parseQuery(String queryAsString, ParseFieldMatcher matcher) throws IOException { XContentParser parser = XContentFactory.xContent(queryAsString).createParser(queryAsString); return parseQuery(parser, matcher); } - protected final static QueryBuilder parseQuery(BytesReference queryAsBytes) throws IOException { + protected static final QueryBuilder parseQuery(BytesReference queryAsBytes) throws IOException { return parseQuery(queryAsBytes, ParseFieldMatcher.STRICT); } - protected final static QueryBuilder parseQuery(BytesReference queryAsBytes, ParseFieldMatcher matcher) throws IOException { + protected static final QueryBuilder parseQuery(BytesReference queryAsBytes, ParseFieldMatcher matcher) throws IOException { XContentParser parser = XContentFactory.xContent(queryAsBytes).createParser(queryAsBytes); return parseQuery(parser, matcher); } @@ -513,7 +511,7 @@ public abstract class AbstractQueryTestCase> protected QueryBuilder assertSerialization(QueryBuilder testQuery) throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { output.writeNamedWriteable(testQuery); - try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), serviceHolder.namedWriteableRegistry)) { + try (StreamInput in = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), serviceHolder.namedWriteableRegistry)) { QueryBuilder deserializedQuery = in.readNamedWriteable(QueryBuilder.class); assertEquals(testQuery, deserializedQuery); assertEquals(testQuery.hashCode(), deserializedQuery.hashCode()); @@ -562,7 +560,7 @@ public abstract class AbstractQueryTestCase> protected QB copyQuery(QB query) throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { output.writeNamedWriteable(query); - try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), serviceHolder.namedWriteableRegistry)) { + try (StreamInput in = new 
NamedWriteableAwareStreamInput(output.bytes().streamInput(), serviceHolder.namedWriteableRegistry)) { return (QB) in.readNamedWriteable(QueryBuilder.class); } } @@ -685,18 +683,6 @@ public abstract class AbstractQueryTestCase> return (serviceHolder.currentTypes.length == 0) ? MetaData.ALL : randomFrom(serviceHolder.currentTypes); } - public static String randomGeohash(int minPrecision, int maxPrecision) { - return geohashGenerator.ofStringLength(random(), minPrecision, maxPrecision); - } - - public static class GeohashGenerator extends CodepointSetGenerator { - private final static char[] ASCII_SET = "0123456789bcdefghjkmnpqrstuvwxyz".toCharArray(); - - public GeohashGenerator() { - super(ASCII_SET); - } - } - protected static Fuzziness randomFuzziness(String fieldName) { switch (fieldName) { case INT_FIELD_NAME: @@ -880,14 +866,14 @@ public abstract class AbstractQueryTestCase> scriptSettings.addAll(pluginsService.getPluginSettings()); scriptSettings.add(InternalSettingsPlugin.VERSION_CREATED); SettingsModule settingsModule = new SettingsModule(settings, scriptSettings, pluginsService.getPluginSettingsFilter()); - searchModule = new SearchModule(settings, namedWriteableRegistry) { + searchModule = new SearchModule(settings, namedWriteableRegistry, false, pluginsService.filterPlugins(SearchPlugin.class)) { @Override protected void configureSearch() { // Skip me } }; ModulesBuilder modulesBuilder = new ModulesBuilder(); - for (Module pluginModule : pluginsService.nodeModules()) { + for (Module pluginModule : pluginsService.createGuiceModules()) { modulesBuilder.add(pluginModule); } modulesBuilder.add( diff --git a/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java b/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java index 933f26e6e81..4440fbe117d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java +++ b/test/framework/src/main/java/org/elasticsearch/test/BackgroundIndexer.java @@ -49,7 +49,7 @@ public class BackgroundIndexer implements AutoCloseable { final Thread[] writers; final CountDownLatch stopLatch; - final CopyOnWriteArrayList failures; + final CopyOnWriteArrayList failures; final AtomicBoolean stop = new AtomicBoolean(false); final AtomicLong idGenerator = new AtomicLong(); final AtomicLong indexCounter = new AtomicLong(); @@ -169,7 +169,7 @@ public class BackgroundIndexer implements AutoCloseable { } } logger.info("**** done indexing thread {} stop: {} numDocsIndexed: {}", indexerId, stop.get(), indexCounter.get()); - } catch (Throwable e) { + } catch (Exception e) { failures.add(e); logger.warn("**** failed indexing thread {} on doc id {}", e, indexerId, id); } finally { diff --git a/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java b/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java index fe7ba74a327..a6d35930e6b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ClusterServiceUtils.java @@ -21,7 +21,6 @@ package org.elasticsearch.test; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.NodeConnectionsService; @@ -30,7 +29,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; 
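The assertSerialization and copyQuery hunks above swap StreamInput.wrap(output.bytes()) for output.bytes().streamInput(), reading straight from the reference instead of first materializing a byte[]. The round-trip pattern in isolation, where query and registry stand in for the test's fixtures:

    try (BytesStreamOutput out = new BytesStreamOutput()) {
        out.writeNamedWriteable(query);
        try (StreamInput in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), registry)) {
            QueryBuilder copy = in.readNamedWriteable(QueryBuilder.class);
            assert query.equals(copy) && query.hashCode() == copy.hashCode();
        }
    }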
import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.threadpool.ThreadPool; import java.util.Arrays; @@ -46,7 +45,7 @@ public class ClusterServiceUtils { ClusterService clusterService = new ClusterService(Settings.builder().put("cluster.name", "ClusterServiceTests").build(), new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), threadPool); - clusterService.setLocalNode(new DiscoveryNode("node", DummyTransportAddress.INSTANCE, Collections.emptyMap(), + clusterService.setLocalNode(new DiscoveryNode("node", LocalTransportAddress.buildUnique(), Collections.emptyMap(), new HashSet<>(Arrays.asList(DiscoveryNode.Role.values())),Version.CURRENT)); clusterService.setNodeConnectionsService(new NodeConnectionsService(Settings.EMPTY, null, null) { @Override @@ -98,8 +97,8 @@ public class ClusterServiceUtils { } @Override - public void onFailure(String source, Throwable t) { - fail("unexpected exception" + t); + public void onFailure(String source, Exception e) { + fail("unexpected exception" + e); } }); try { diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java index 5704a178f48..1aa0428454e 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESAllocationTestCase.java @@ -42,7 +42,7 @@ import org.elasticsearch.cluster.routing.allocation.decider.SameShardAllocationD import org.elasticsearch.common.Randomness; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.DummyTransportAddress; +import org.elasticsearch.common.transport.LocalTransportAddress; import org.elasticsearch.gateway.AsyncShardFetch; import org.elasticsearch.gateway.GatewayAllocator; import org.elasticsearch.gateway.ReplicaShardAllocator; @@ -133,19 +133,19 @@ public abstract class ESAllocationTestCase extends ESTestCase { } protected static DiscoveryNode newNode(String nodeName, String nodeId, Map attributes) { - return new DiscoveryNode(nodeName, nodeId, DummyTransportAddress.INSTANCE, attributes, MASTER_DATA_ROLES, Version.CURRENT); + return new DiscoveryNode(nodeName, nodeId, LocalTransportAddress.buildUnique(), attributes, MASTER_DATA_ROLES, Version.CURRENT); } protected static DiscoveryNode newNode(String nodeId, Map attributes) { - return new DiscoveryNode(nodeId, DummyTransportAddress.INSTANCE, attributes, MASTER_DATA_ROLES, Version.CURRENT); + return new DiscoveryNode(nodeId, LocalTransportAddress.buildUnique(), attributes, MASTER_DATA_ROLES, Version.CURRENT); } protected static DiscoveryNode newNode(String nodeId, Set roles) { - return new DiscoveryNode(nodeId, DummyTransportAddress.INSTANCE, emptyMap(), roles, Version.CURRENT); + return new DiscoveryNode(nodeId, LocalTransportAddress.buildUnique(), emptyMap(), roles, Version.CURRENT); } protected static DiscoveryNode newNode(String nodeId, Version version) { - return new DiscoveryNode(nodeId, DummyTransportAddress.INSTANCE, emptyMap(), MASTER_DATA_ROLES, version); + return new DiscoveryNode(nodeId, LocalTransportAddress.buildUnique(), emptyMap(), MASTER_DATA_ROLES, version); } 
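DummyTransportAddress.INSTANCE was a shared singleton, so every stubbed node carried the same address; LocalTransportAddress.buildUnique() mints a fresh address per call, which keeps fake nodes distinguishable. The replacement pattern used by all of these helpers:

    DiscoveryNode node = new DiscoveryNode("node-1", LocalTransportAddress.buildUnique(),
            emptyMap(), emptySet(), Version.CURRENT);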
protected static ClusterState startRandomInitializingShard(ClusterState clusterState, AllocationService strategy) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java index 69bacfd3ffa..3414e9eda10 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.node.Node; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.junit.annotations.TestLogging; @@ -241,8 +242,8 @@ public abstract class ESBackcompatTestCase extends ESIntegTestCase { protected Settings commonNodeSettings(int nodeOrdinal) { Settings.Builder builder = Settings.builder().put(requiredSettings()); - builder.put(NetworkModule.TRANSPORT_TYPE_KEY, "netty"); // run same transport / disco as external - builder.put(Node.NODE_MODE_SETTING.getKey(), "network"); + builder.put(NetworkModule.TRANSPORT_TYPE_KEY, "netty3"); // run same transport / disco as external + builder.put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "zen"); return builder.build(); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index a49c33f5be8..73edf000b96 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -23,12 +23,13 @@ import com.carrotsearch.randomizedtesting.annotations.TestGroup; import com.carrotsearch.randomizedtesting.generators.RandomInts; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import org.apache.http.HttpHost; -import org.apache.http.impl.client.CloseableHttpClient; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TestUtil; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.discovery.DiscoveryModule; +import org.elasticsearch.transport.MockTcpTransportPlugin; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; @@ -106,7 +107,6 @@ import org.elasticsearch.index.MergeSchedulerConfig; import org.elasticsearch.index.MockEngineFactoryPlugin; import org.elasticsearch.index.codec.CodecService; import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.internal.TimestampFieldMapper; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.indices.IndicesQueryCache; import org.elasticsearch.indices.IndicesRequestCache; @@ -405,7 +405,7 @@ public abstract class ESIntegTestCase extends ESTestCase { .setOrder(0) .setSettings(randomSettingsBuilder); if (mappings != null) { - logger.info("test using _default_ mappings: [{}]", mappings.bytes().toUtf8()); + logger.info("test using _default_ mappings: [{}]", mappings.bytes().utf8ToString()); putTemplate.addMapping("_default_", mappings); } assertAcked(putTemplate.execute().actionGet()); @@ -925,7 +925,7 @@ 
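For the backcompat base class this makes both knobs explicit: the transport implementation is now selected by its concrete name netty3 (rather than the old netty alias), and discovery is pinned to zen now that the removed node.mode setting can no longer imply it. As plain settings, roughly:

    Settings settings = Settings.builder()
            .put(NetworkModule.TRANSPORT_TYPE_KEY, "netty3")             // same transport as the external cluster
            .put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "zen") // real discovery, not local
            .build();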
public abstract class ESIntegTestCase extends ESTestCase { * This saves on unneeded searches. * @return the actual number of docs seen. */ - public long waitForDocs(final long numDocs, final @Nullable BackgroundIndexer indexer) throws InterruptedException { + public long waitForDocs(final long numDocs, @Nullable final BackgroundIndexer indexer) throws InterruptedException { // indexing threads can wait for up to ~1m before retrying when they first try to index into a shard which is not STARTED. return waitForDocs(numDocs, 90, TimeUnit.SECONDS, indexer); } @@ -940,7 +940,7 @@ public abstract class ESIntegTestCase extends ESTestCase { * This saves on unneeded searches. * @return the actual number of docs seen. */ - public long waitForDocs(final long numDocs, int maxWaitTime, TimeUnit maxWaitTimeUnit, final @Nullable BackgroundIndexer indexer) + public long waitForDocs(final long numDocs, int maxWaitTime, TimeUnit maxWaitTimeUnit, @Nullable final BackgroundIndexer indexer) throws InterruptedException { final AtomicLong lastKnownCount = new AtomicLong(-1); long lastStartCount = -1; @@ -956,7 +956,7 @@ public abstract class ESIntegTestCase extends ESTestCase { client().admin().indices().prepareRefresh().get(); } lastKnownCount.set(count); - } catch (Throwable e) { // count now acts like search and barfs if all shards failed... + } catch (Exception e) { // count now acts like search and barfs if all shards failed... logger.debug("failed to executed count", e); return false; } @@ -1334,7 +1334,7 @@ public abstract class ESIntegTestCase extends ESTestCase { } final String[] indices = indicesSet.toArray(new String[indicesSet.size()]); Collections.shuffle(builders, random()); - final CopyOnWriteArrayList> errors = new CopyOnWriteArrayList<>(); + final CopyOnWriteArrayList> errors = new CopyOnWriteArrayList<>(); List inFlightAsyncOperations = new ArrayList<>(); // If you are indexing just a few documents then frequently do it one at a time. If many then frequently in bulk. if (builders.size() < FREQUENT_BULK_THRESHOLD ? frequently() : builders.size() < ALWAYS_BULK_THRESHOLD ? 
rarely() : false) { @@ -1367,8 +1367,8 @@ public abstract class ESIntegTestCase extends ESTestCase { for (CountDownLatch operation : inFlightAsyncOperations) { operation.await(); } - final List actualErrors = new ArrayList<>(); - for (Tuple tuple : errors) { + final List actualErrors = new ArrayList<>(); + for (Tuple tuple : errors) { if (ExceptionsHelper.unwrapCause(tuple.v2()) instanceof EsRejectedExecutionException) { tuple.v1().execute().actionGet(); // re-index if rejected } else { @@ -1526,7 +1526,7 @@ public abstract class ESIntegTestCase extends ESTestCase { } @Override - public final void onFailure(Throwable t) { + public final void onFailure(Exception t) { try { logger.info("Action Failed", t); addError(t); @@ -1535,24 +1535,24 @@ public abstract class ESIntegTestCase extends ESTestCase { } } - protected void addError(Throwable t) { + protected void addError(Exception e) { } } private class PayloadLatchedActionListener extends LatchedActionListener { - private final CopyOnWriteArrayList> errors; + private final CopyOnWriteArrayList> errors; private final T builder; - public PayloadLatchedActionListener(T builder, CountDownLatch latch, CopyOnWriteArrayList> errors) { + public PayloadLatchedActionListener(T builder, CountDownLatch latch, CopyOnWriteArrayList> errors) { super(latch); this.errors = errors; this.builder = builder; } @Override - protected void addError(Throwable t) { - errors.add(new Tuple<>(builder, t)); + protected void addError(Exception e) { + errors.add(new Tuple<>(builder, e)); } } @@ -1688,9 +1688,13 @@ public abstract class ESIntegTestCase extends ESTestCase { return Settings.EMPTY; } + protected boolean ignoreExternalCluster() { + return false; + } + protected TestCluster buildTestCluster(Scope scope, long seed) throws IOException { String clusterAddresses = System.getProperty(TESTS_CLUSTER); - if (Strings.hasLength(clusterAddresses)) { + if (Strings.hasLength(clusterAddresses) && ignoreExternalCluster() == false) { if (scope == Scope.TEST) { throw new IllegalArgumentException("Cannot run TEST scope test with " + TESTS_CLUSTER); } @@ -1708,28 +1712,7 @@ public abstract class ESIntegTestCase extends ESTestCase { default: throw new ElasticsearchException("Scope not supported: " + scope); } - NodeConfigurationSource nodeConfigurationSource = new NodeConfigurationSource() { - @Override - public Settings nodeSettings(int nodeOrdinal) { - return Settings.builder().put(NetworkModule.HTTP_ENABLED.getKey(), false). 
- put(ESIntegTestCase.this.nodeSettings(nodeOrdinal)).build(); - } - @Override - public Collection> nodePlugins() { - return ESIntegTestCase.this.nodePlugins(); - } - - @Override - public Settings transportClientSettings() { - return ESIntegTestCase.this.transportClientSettings(); - } - - @Override - public Collection> transportClientPlugins() { - return ESIntegTestCase.this.transportClientPlugins(); - } - }; boolean supportsDedicatedMasters = getSupportsDedicatedMasters(); int numDataNodes = getNumDataNodes(); @@ -1741,22 +1724,90 @@ public abstract class ESIntegTestCase extends ESTestCase { minNumDataNodes = getMinNumDataNodes(); maxNumDataNodes = getMaxNumDataNodes(); } + Collection> mockPlugins = getMockPlugins(); + final NodeConfigurationSource nodeConfigurationSource = getNodeConfigSource(); + if (addMockTransportService()) { + ArrayList> mocks = new ArrayList<>(mockPlugins); + // add both mock plugins - local and tcp if they are not there + // we do this in case somebody overrides getMockPlugins and misses to call super + if (mockPlugins.contains(AssertingLocalTransport.TestPlugin.class) == false) { + mocks.add(AssertingLocalTransport.TestPlugin.class); + } + if (mockPlugins.contains(MockTcpTransportPlugin.class) == false) { + mocks.add(MockTcpTransportPlugin.class); + } + mockPlugins = mocks; + } + return new InternalTestCluster(seed, createTempDir(), supportsDedicatedMasters, minNumDataNodes, maxNumDataNodes, + InternalTestCluster.clusterName(scope.name(), seed) + "-cluster", nodeConfigurationSource, getNumClientNodes(), + InternalTestCluster.DEFAULT_ENABLE_HTTP_PIPELINING, nodePrefix, mockPlugins, getClientWrapper()); + } + + protected NodeConfigurationSource getNodeConfigSource() { SuppressLocalMode noLocal = getAnnotation(this.getClass(), SuppressLocalMode.class); SuppressNetworkMode noNetwork = getAnnotation(this.getClass(), SuppressNetworkMode.class); - String nodeMode = InternalTestCluster.configuredNodeMode(); + Settings.Builder networkSettings = Settings.builder(); + final boolean isNetwork; if (noLocal != null && noNetwork != null) { throw new IllegalStateException("Can't suppress both network and local mode"); } else if (noLocal != null) { - nodeMode = "network"; - } else if (noNetwork != null) { - nodeMode = "local"; + if (addMockTransportService()) { + networkSettings.put(NetworkModule.TRANSPORT_TYPE_KEY, MockTcpTransportPlugin.MOCK_TCP_TRANSPORT_NAME); + } + isNetwork = true; + } else { + if (addMockTransportService()) { + networkSettings.put(NetworkModule.TRANSPORT_TYPE_KEY, AssertingLocalTransport.ASSERTING_TRANSPORT_NAME); + } else { + networkSettings.put(NetworkModule.TRANSPORT_TYPE_KEY, "local"); + } + isNetwork = false; } - Collection> mockPlugins = getMockPlugins(); + NodeConfigurationSource nodeConfigurationSource = new NodeConfigurationSource() { + @Override + public Settings nodeSettings(int nodeOrdinal) { + return Settings.builder() + .put(NetworkModule.HTTP_ENABLED.getKey(), false) + .put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), + isNetwork ? DiscoveryModule.DISCOVERY_TYPE_SETTING.getDefault(Settings.EMPTY) : "local") + .put(networkSettings.build()). 
+ put(ESIntegTestCase.this.nodeSettings(nodeOrdinal)).build(); + } - return new InternalTestCluster(nodeMode, seed, createTempDir(), supportsDedicatedMasters, minNumDataNodes, maxNumDataNodes, - InternalTestCluster.clusterName(scope.name(), seed) + "-cluster", nodeConfigurationSource, getNumClientNodes(), - InternalTestCluster.DEFAULT_ENABLE_HTTP_PIPELINING, nodePrefix, mockPlugins, getClientWrapper()); + @Override + public Collection> nodePlugins() { + return ESIntegTestCase.this.nodePlugins(); + } + + @Override + public Settings transportClientSettings() { + return Settings.builder().put(networkSettings.build()) + .put(ESIntegTestCase.this.transportClientSettings()).build(); + } + + @Override + public Collection> transportClientPlugins() { + Collection> plugins = ESIntegTestCase.this.transportClientPlugins(); + if (isNetwork && plugins.contains(MockTcpTransportPlugin.class) == false) { + plugins = new ArrayList<>(plugins); + plugins.add(MockTcpTransportPlugin.class); + } else if (isNetwork == false && plugins.contains(AssertingLocalTransport.class) == false) { + plugins = new ArrayList<>(plugins); + plugins.add(AssertingLocalTransport.TestPlugin.class); + } + return Collections.unmodifiableCollection(plugins); + } + }; + return nodeConfigurationSource; + } + + /** + * Iff this returns true, mock transport implementations are used for the test runs. Otherwise no mock transport implementations are used. + * The default is true. + */ + protected boolean addMockTransportService() { + return true; } /** @@ -1772,7 +1823,7 @@ public abstract class ESIntegTestCase extends ESTestCase { protected Collection> getMockPlugins() { final ArrayList> mocks = new ArrayList<>(); if (randomBoolean()) { // sometimes run without those completely - if (randomBoolean()) { + if (randomBoolean() && addMockTransportService()) { mocks.add(MockTransportService.TestPlugin.class); } if (randomBoolean()) { @@ -1787,9 +1838,11 @@ if (randomBoolean()) { mocks.add(MockSearchService.TestPlugin.class); } - if (randomBoolean()) { - mocks.add(AssertingLocalTransport.TestPlugin.class); - } + } + + if (addMockTransportService()) { + mocks.add(AssertingLocalTransport.TestPlugin.class); + mocks.add(MockTcpTransportPlugin.class); + } mocks.add(TestSeedPlugin.class); return Collections.unmodifiableList(mocks); @@ -1800,7 +1853,6 @@ public abstract class ESIntegTestCase extends ESTestCase { public List> getSettings() { return Arrays.asList(INDEX_TEST_SEED_SETTING); } - } /** @@ -2035,18 +2087,18 @@ public abstract class ESIntegTestCase extends ESTestCase { * The returned client gets automatically closed when needed, it shouldn't be closed as part of tests otherwise * it cannot be reused by other tests anymore.
*/ - protected synchronized static RestClient getRestClient() { + protected static synchronized RestClient getRestClient() { if (restClient == null) { restClient = createRestClient(null); } return restClient; } - protected static RestClient createRestClient(CloseableHttpClient httpClient) { - return createRestClient(httpClient, "http"); + protected static RestClient createRestClient(RestClient.HttpClientConfigCallback httpClientConfigCallback) { + return createRestClient(httpClientConfigCallback, "http"); } - protected static RestClient createRestClient(CloseableHttpClient httpClient, String protocol) { + protected static RestClient createRestClient(RestClient.HttpClientConfigCallback httpClientConfigCallback, String protocol) { final NodesInfoResponse nodeInfos = client().admin().cluster().prepareNodesInfo().get(); final List nodes = nodeInfos.getNodes(); assertFalse(nodeInfos.hasFailures()); @@ -2060,8 +2112,8 @@ public abstract class ESIntegTestCase extends ESTestCase { } } RestClient.Builder builder = RestClient.builder(hosts.toArray(new HttpHost[hosts.size()])); - if (httpClient != null) { - builder.setHttpClient(httpClient); + if (httpClientConfigCallback != null) { + builder.setHttpClientConfigCallback(httpClientConfigCallback); } return builder.build(); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java index 8f06e75b038..0ae94057ecb 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java @@ -30,6 +30,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.Priority; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; @@ -177,14 +178,14 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { .put("script.inline", "true") .put("script.stored", "true") .put(EsExecutors.PROCESSORS_SETTING.getKey(), 1) // limit the number of threads created - .put("http.enabled", false) - .put(Node.NODE_LOCAL_SETTING.getKey(), true) + .put(NetworkModule.HTTP_ENABLED.getKey(), false) + .put("discovery.type", "local") + .put("transport.type", "local") .put(Node.NODE_DATA_SETTING.getKey(), true) .put(nodeSettings()) // allow test cases to provide their own settings or override these .build(); Node build = new MockNode(settings, getPlugins()); build.start(); - assertThat(DiscoveryNode.isLocalNode(build.settings()), is(true)); return build; } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index d04d12304de..c9ecf2ec639 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -24,6 +24,7 @@ import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering; import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope; import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope.Scope; import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; +import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator; import 
com.carrotsearch.randomizedtesting.generators.RandomInts; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.carrotsearch.randomizedtesting.generators.RandomStrings; @@ -92,6 +93,7 @@ import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Random; import java.util.Set; import java.util.TreeMap; import java.util.concurrent.ExecutorService; @@ -344,9 +346,15 @@ public abstract class ESTestCase extends LuceneTestCase { /** Pick a random object from the given array. The array must not be empty. */ public static T randomFrom(T... array) { - return RandomPicks.randomFrom(random(), array); + return randomFrom(random(), array); } + /** Pick a random object from the given array. The array must not be empty. */ + public static T randomFrom(Random random, T... array) { + return RandomPicks.randomFrom(random, array); + } + + /** Pick a random object from the given list. */ public static T randomFrom(List list) { return RandomPicks.randomFrom(random(), list); @@ -656,6 +664,20 @@ public abstract class ESTestCase extends LuceneTestCase { return things; } + public static String randomGeohash(int minPrecision, int maxPrecision) { + return geohashGenerator.ofStringLength(random(), minPrecision, maxPrecision); + } + + private static final GeohashGenerator geohashGenerator = new GeohashGenerator(); + + public static class GeohashGenerator extends CodepointSetGenerator { + private static final char[] ASCII_SET = "0123456789bcdefghjkmnpqrstuvwxyz".toCharArray(); + + public GeohashGenerator() { + super(ASCII_SET); + } + } + /** * Randomly shuffles the fields inside objects in the {@link XContentBuilder} passed in. * Recursively goes through inner objects and also shuffles them. Exceptions for this diff --git a/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java b/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java index 8216f67cfed..b5a5778c1e5 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ExternalNode.java @@ -32,7 +32,7 @@ import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.discovery.DiscoveryModule; -import org.elasticsearch.node.Node; +import org.elasticsearch.transport.MockTransportClient; import java.io.Closeable; import java.io.IOException; @@ -51,7 +51,7 @@ final class ExternalNode implements Closeable { public static final Settings REQUIRED_SETTINGS = Settings.builder() .put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "zen") - .put(Node.NODE_MODE_SETTING.getKey(), "network").build(); // we need network mode for this + .put(NetworkModule.TRANSPORT_TYPE_KEY, "netty3").build(); // we need network mode for this private final Path path; private final Random random; @@ -106,8 +106,6 @@ final class ExternalNode implements Closeable { case "cluster.name": case "node.name": case "path.home": - case "node.mode": - case "node.local": case NetworkModule.TRANSPORT_TYPE_KEY: case "discovery.type": case NetworkModule.TRANSPORT_SERVICE_TYPE_KEY: @@ -190,7 +188,7 @@ final class ExternalNode implements Closeable { .put("client.transport.nodes_sampler_interval", "1s") .put("node.name", "transport_client_" + nodeInfo.getNode().getName()) .put(ClusterName.CLUSTER_NAME_SETTING.getKey(), clusterName).put("client.transport.sniff", false).build(); - TransportClient 
client = TransportClient.builder().settings(clientSettings).build(); + TransportClient client = new MockTransportClient(clientSettings); client.addTransportAddress(addr); this.client = client; } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java index 71fe622d8c0..6f6ac8488d1 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ExternalTestCluster.java @@ -29,17 +29,19 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.env.Environment; -import org.elasticsearch.node.Node; -import org.elasticsearch.node.internal.InternalSettingsPreparer; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.transport.MockTcpTransportPlugin; +import org.elasticsearch.transport.MockTransportClient; import java.io.IOException; import java.net.InetSocketAddress; import java.nio.file.Path; +import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.concurrent.atomic.AtomicInteger; @@ -70,18 +72,22 @@ public final class ExternalTestCluster extends TestCluster { public ExternalTestCluster(Path tempDir, Settings additionalSettings, Collection> pluginClasses, TransportAddress... transportAddresses) { super(0); - Settings clientSettings = Settings.builder() - .put(additionalSettings) - .put("node.name", InternalTestCluster.TRANSPORT_CLIENT_PREFIX + EXTERNAL_CLUSTER_PREFIX + counter.getAndIncrement()) - .put("client.transport.ignore_cluster_name", true) - .put(Environment.PATH_HOME_SETTING.getKey(), tempDir) - .put(Node.NODE_MODE_SETTING.getKey(), "network").build(); // we require network here! 
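The hunks around this point replace the removed TransportClient.builder() chain with direct construction of the test framework's MockTransportClient. A minimal sketch of the new pattern; the cluster name and loopback address are illustrative assumptions, not values from this change:

```java
import java.net.InetAddress;

import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.transport.MockTransportClient;

public class MockClientSketch {
    public static void main(String[] args) throws Exception {
        // Settings that previously went through TransportClient.builder().settings(...)
        // are now handed straight to the MockTransportClient constructor.
        Settings settings = Settings.builder()
                .put("cluster.name", "test-cluster") // illustrative
                .put("client.transport.sniff", false)
                .build();
        try (TransportClient client = new MockTransportClient(settings)) {
            client.addTransportAddress(
                    new InetSocketTransportAddress(InetAddress.getLoopbackAddress(), 9300));
            // ... issue requests through the client ...
        }
    }
}
```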
+ Settings.Builder clientSettingsBuilder = Settings.builder() + .put(additionalSettings) + .put("node.name", InternalTestCluster.TRANSPORT_CLIENT_PREFIX + EXTERNAL_CLUSTER_PREFIX + counter.getAndIncrement()) + .put("client.transport.ignore_cluster_name", true) + .put(Environment.PATH_HOME_SETTING.getKey(), tempDir); + boolean addMockTcpTransport = additionalSettings.get(NetworkModule.TRANSPORT_TYPE_KEY) == null; - TransportClient.Builder transportClientBuilder = TransportClient.builder().settings(clientSettings); - for (Class pluginClass : pluginClasses) { - transportClientBuilder.addPlugin(pluginClass); + if (addMockTcpTransport) { + clientSettingsBuilder.put(NetworkModule.TRANSPORT_TYPE_KEY, MockTcpTransportPlugin.MOCK_TCP_TRANSPORT_NAME); + if (pluginClasses.contains(MockTcpTransportPlugin.class) == false) { + pluginClasses = new ArrayList<>(pluginClasses); + pluginClasses.add(MockTcpTransportPlugin.class); + } } - TransportClient client = transportClientBuilder.build(); + Settings clientSettings = clientSettingsBuilder.build(); + TransportClient client = new MockTransportClient(clientSettings, pluginClasses); try { client.addTransportAddresses(transportAddresses); diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index b6b75b1ec64..4dbc59f1eca 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -40,7 +40,6 @@ import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNode.Role; -import org.elasticsearch.cluster.node.DiscoveryNodeService; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.OperationRouting; import org.elasticsearch.cluster.routing.ShardRouting; @@ -54,6 +53,7 @@ import org.elasticsearch.common.io.FileSystemUtils; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings.Builder; import org.elasticsearch.common.transport.InetSocketTransportAddress; @@ -87,13 +87,12 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchService; import org.elasticsearch.test.disruption.ServiceDisruptionScheme; -import org.elasticsearch.test.transport.AssertingLocalTransport; import org.elasticsearch.test.transport.MockTransportService; +import org.elasticsearch.transport.MockTransportClient; import org.elasticsearch.transport.TcpTransport; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.TransportSettings; -import org.elasticsearch.transport.netty.NettyTransport; import org.junit.Assert; import java.io.Closeable; @@ -227,19 +226,14 @@ public final class InternalTestCluster extends TestCluster { private final Path baseDir; private ServiceDisruptionScheme activeDisruptionScheme; - private String nodeMode; private Function clientWrapper; - public InternalTestCluster(String nodeMode, long clusterSeed, Path baseDir, + public InternalTestCluster(long 
clusterSeed, Path baseDir, boolean randomlyAddDedicatedMasters, int minNumDataNodes, int maxNumDataNodes, String clusterName, NodeConfigurationSource nodeConfigurationSource, int numClientNodes, boolean enableHttpPipelining, String nodePrefix, Collection> mockPlugins, Function clientWrapper) { super(clusterSeed); - if ("network".equals(nodeMode) == false && "local".equals(nodeMode) == false) { - throw new IllegalArgumentException("Unknown nodeMode: " + nodeMode); - } this.clientWrapper = clientWrapper; - this.nodeMode = nodeMode; this.baseDir = baseDir; this.clusterName = clusterName; if (minNumDataNodes < 0 || maxNumDataNodes < 0) { @@ -311,7 +305,6 @@ public final class InternalTestCluster extends TestCluster { builder.put(Environment.PATH_REPO_SETTING.getKey(), baseDir.resolve("repos")); builder.put(TransportSettings.PORT.getKey(), TRANSPORT_BASE_PORT + "-" + (TRANSPORT_BASE_PORT + PORTS_PER_CLUSTER)); builder.put("http.port", HTTP_BASE_PORT + "-" + (HTTP_BASE_PORT + PORTS_PER_CLUSTER)); - builder.put(Node.NODE_MODE_SETTING.getKey(), nodeMode); builder.put("http.pipelining", enableHttpPipelining); if (Strings.hasLength(System.getProperty("tests.es.logger.level"))) { builder.put("logger.level", System.getProperty("tests.es.logger.level")); @@ -336,24 +329,6 @@ public final class InternalTestCluster extends TestCluster { executor = EsExecutors.newScaling("test runner", 0, Integer.MAX_VALUE, 0, TimeUnit.SECONDS, EsExecutors.daemonThreadFactory("test_" + clusterName), new ThreadContext(Settings.EMPTY)); } - public static String configuredNodeMode() { - Builder builder = Settings.builder(); - if (Strings.isEmpty(System.getProperty("tests.es.node.mode")) && Strings.isEmpty(System.getProperty("tests.node.local"))) { - return "local"; // default if nothing is specified - } - if (Strings.hasLength(System.getProperty("tests.es.node.mode"))) { - builder.put(Node.NODE_MODE_SETTING.getKey(), System.getProperty("tests.es.node.mode")); - } - if (Strings.hasLength(System.getProperty("tests.es.node.local"))) { - builder.put(Node.NODE_LOCAL_SETTING.getKey(), System.getProperty("tests.es.node.local")); - } - if (DiscoveryNode.isLocalNode(builder.build())) { - return "local"; - } else { - return "network"; - } - } - @Override public String getClusterName() { return clusterName; @@ -363,10 +338,6 @@ public final class InternalTestCluster extends TestCluster { return nodes.keySet().toArray(Strings.EMPTY_ARRAY); } - private boolean isLocalTransportConfigured() { - return "local".equals(nodeMode); - } - private Settings getSettings(int nodeOrdinal, long nodeSeed, Settings others) { Builder builder = Settings.builder().put(defaultSettings) .put(getRandomNodeSettings(nodeSeed)); @@ -387,19 +358,13 @@ public final class InternalTestCluster extends TestCluster { private Collection> getPlugins() { Set> plugins = new HashSet<>(nodeConfigurationSource.nodePlugins()); plugins.addAll(mockPlugins); - if (isLocalTransportConfigured() == false) { - // this is crazy we must do this here...we should really just always be using local transport... 
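Since getPlugins() now unconditionally merges the NodeConfigurationSource plugins with the mock plugins, a nodePlugins() override in a test class is always honored. A short sketch of such an override; MyPlugin is a hypothetical placeholder, not a class from this change:

```java
import java.util.Collection;
import java.util.Collections;

import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;

public class MyPluginIT extends ESIntegTestCase {
    // Hypothetical plugin under test; depending on the Plugin API at this
    // revision a real subclass may need to override additional methods.
    public static class MyPlugin extends Plugin {
    }

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        // Merged with the randomized mock plugins in InternalTestCluster#getPlugins.
        return Collections.singletonList(MyPlugin.class);
    }
}
```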
- plugins.remove(AssertingLocalTransport.TestPlugin.class); - } return plugins; } private Settings getRandomNodeSettings(long seed) { Random random = new Random(seed); Builder builder = Settings.builder(); - if (isLocalTransportConfigured() == false) { - builder.put(Transport.TRANSPORT_TCP_COMPRESS.getKey(), rarely(random)); - } + builder.put(Transport.TRANSPORT_TCP_COMPRESS.getKey(), rarely(random)); if (random.nextBoolean()) { builder.put("cache.recycler.page.type", RandomPicks.randomFrom(random, PageCacheRecycler.Type.values())); } @@ -419,9 +384,8 @@ public final class InternalTestCluster extends TestCluster { } } - // randomize netty settings + // randomize tcp settings if (random.nextBoolean()) { - builder.put(NettyTransport.WORKER_COUNT.getKey(), random.nextInt(3) + 1); builder.put(TcpTransport.CONNECTIONS_PER_NODE_RECOVERY.getKey(), random.nextInt(2) + 1); builder.put(TcpTransport.CONNECTIONS_PER_NODE_BULK.getKey(), random.nextInt(3) + 1); builder.put(TcpTransport.CONNECTIONS_PER_NODE_REG.getKey(), random.nextInt(6) + 1); @@ -606,7 +570,7 @@ public final class InternalTestCluster extends TestCluster { .put(Environment.PATH_HOME_SETTING.getKey(), baseDir) // allow overriding path.home .put(settings) .put("node.name", name) - .put(DiscoveryNodeService.NODE_ID_SEED_SETTING.getKey(), seed) + .put(NodeEnvironment.NODE_ID_SEED_SETTING.getKey(), seed) .build(); MockNode node = new MockNode(finalSettings, plugins); return new NodeAndClient(name, node, nodeId); @@ -776,10 +740,6 @@ public final class InternalTestCluster extends TestCluster { } } - public String getNodeMode() { - return nodeMode; - } - private final class NodeAndClient implements Closeable { private MockNode node; private Client nodeClient; @@ -847,7 +807,7 @@ public final class InternalTestCluster extends TestCluster { /* no sniff client for now - doesn't work will all tests since it might throw NoNodeAvailableException if nodes are shut down. 
* we first need support of transportClientRatio as annotations or so */ - transportClient = new TransportClientFactory(false, nodeConfigurationSource.transportClientSettings(), baseDir, nodeMode, nodeConfigurationSource.transportClientPlugins()).client(node, clusterName); + transportClient = new TransportClientFactory(false, nodeConfigurationSource.transportClientSettings(), baseDir, nodeConfigurationSource.transportClientPlugins()).client(node, clusterName); } return clientWrapper.apply(transportClient); } @@ -898,8 +858,8 @@ public final class InternalTestCluster extends TestCluster { } private void createNewNode(final Settings newSettings) { - final long newIdSeed = DiscoveryNodeService.NODE_ID_SEED_SETTING.get(node.settings()) + 1; // use a new seed to make sure we have new node id - Settings finalSettings = Settings.builder().put(node.settings()).put(newSettings).put(DiscoveryNodeService.NODE_ID_SEED_SETTING.getKey(), newIdSeed).build(); + final long newIdSeed = NodeEnvironment.NODE_ID_SEED_SETTING.get(node.settings()) + 1; // use a new seed to make sure we have new node id + Settings finalSettings = Settings.builder().put(node.settings()).put(newSettings).put(NodeEnvironment.NODE_ID_SEED_SETTING.getKey(), newIdSeed).build(); Collection> plugins = node.getPlugins(); node = new MockNode(finalSettings, plugins); markNodeDataDirsAsNotEligableForWipe(node); @@ -922,14 +882,12 @@ public final class InternalTestCluster extends TestCluster { private final boolean sniff; private final Settings settings; private final Path baseDir; - private final String nodeMode; private final Collection> plugins; - TransportClientFactory(boolean sniff, Settings settings, Path baseDir, String nodeMode, Collection> plugins) { + TransportClientFactory(boolean sniff, Settings settings, Path baseDir, Collection> plugins) { this.sniff = sniff; this.settings = settings != null ? settings : Settings.EMPTY; this.baseDir = baseDir; - this.nodeMode = nodeMode; this.plugins = plugins; } @@ -941,20 +899,13 @@ public final class InternalTestCluster extends TestCluster { .put(Environment.PATH_HOME_SETTING.getKey(), baseDir) .put("node.name", TRANSPORT_CLIENT_PREFIX + node.settings().get("node.name")) .put(ClusterName.CLUSTER_NAME_SETTING.getKey(), clusterName).put("client.transport.sniff", sniff) - .put(Node.NODE_MODE_SETTING.getKey(), Node.NODE_MODE_SETTING.exists(nodeSettings) ? Node.NODE_MODE_SETTING.get(nodeSettings) : nodeMode) .put("logger.prefix", nodeSettings.get("logger.prefix", "")) .put("logger.level", nodeSettings.get("logger.level", "INFO")) .put(settings); - - if (Node.NODE_LOCAL_SETTING.exists(nodeSettings)) { - builder.put(Node.NODE_LOCAL_SETTING.getKey(), Node.NODE_LOCAL_SETTING.get(nodeSettings)); + if ( NetworkModule.TRANSPORT_TYPE_SETTING.exists(settings)) { + builder.put(NetworkModule.TRANSPORT_TYPE_KEY, NetworkModule.TRANSPORT_TYPE_SETTING.get(settings)); } - - TransportClient.Builder clientBuilder = TransportClient.builder().settings(builder.build()); - for (Class plugin : plugins) { - clientBuilder.addPlugin(plugin); - } - TransportClient client = clientBuilder.build(); + TransportClient client = new MockTransportClient(builder.build(), plugins); client.addTransportAddress(addr); return client; } @@ -1337,7 +1288,7 @@ public final class InternalTestCluster extends TestCluster { /** * Restarts a node and calls the callback during restart. 
*/ - synchronized public void restartNode(String nodeName, RestartCallback callback) throws Exception { + public synchronized void restartNode(String nodeName, RestartCallback callback) throws Exception { ensureOpen(); NodeAndClient nodeAndClient = nodes.get(nodeName); if (nodeAndClient != null) { @@ -1346,7 +1297,7 @@ public final class InternalTestCluster extends TestCluster { } } - synchronized private void restartAllNodes(boolean rollingRestart, RestartCallback callback) throws Exception { + private synchronized void restartAllNodes(boolean rollingRestart, RestartCallback callback) throws Exception { ensureOpen(); List toRemove = new ArrayList<>(); try { @@ -1393,7 +1344,6 @@ public final class InternalTestCluster extends TestCluster { // delete data folders now, before we start other nodes that may claim it nodeAndClient.clearDataIfNeeded(callback); - DiscoveryNode discoveryNode = getInstanceFromNode(ClusterService.class, nodeAndClient.node()).localNode(); nodesRoleOrder[nodeAndClient.nodeAndClientId()] = discoveryNode.getRoles(); nodesByRoles.computeIfAbsent(discoveryNode.getRoles(), k -> new ArrayList<>()).add(nodeAndClient); @@ -1481,7 +1431,7 @@ public final class InternalTestCluster extends TestCluster { Client client = viaNode != null ? client(viaNode) : client(); ClusterState state = client.admin().cluster().prepareState().execute().actionGet().getState(); return state.nodes().getMasterNode().getName(); - } catch (Throwable e) { + } catch (Exception e) { logger.warn("Can't fetch cluster state", e); throw new RuntimeException("Can't get master node " + e.getMessage(), e); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java b/test/framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java index 2d09e60eea4..c4a9515f545 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java +++ b/test/framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java @@ -70,7 +70,7 @@ public final class MockIndexEventListener { } @Override - public Collection nodeModules() { + public Collection createGuiceModules() { return Collections.singleton(binder -> binder.bind(TestEventListener.class).toInstance(listener)); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/NodeConfigurationSource.java b/test/framework/src/main/java/org/elasticsearch/test/NodeConfigurationSource.java index e04e840e525..6d8d36e3d11 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/NodeConfigurationSource.java +++ b/test/framework/src/main/java/org/elasticsearch/test/NodeConfigurationSource.java @@ -29,12 +29,12 @@ public abstract class NodeConfigurationSource { public static final NodeConfigurationSource EMPTY = new NodeConfigurationSource() { @Override public Settings nodeSettings(int nodeOrdinal) { - return null; + return Settings.EMPTY; } @Override public Settings transportClientSettings() { - return null; + return Settings.EMPTY; } }; @@ -48,7 +48,9 @@ public abstract class NodeConfigurationSource { return Collections.emptyList(); } - public abstract Settings transportClientSettings(); + public Settings transportClientSettings() { + return Settings.EMPTY; + } /** Returns plugins that should be loaded in the transport client */ public Collection> transportClientPlugins() { diff --git a/test/framework/src/main/java/org/elasticsearch/test/StreamsUtils.java b/test/framework/src/main/java/org/elasticsearch/test/StreamsUtils.java index 10469286e1a..1d0eaa7ce51 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/test/StreamsUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/StreamsUtils.java @@ -19,6 +19,7 @@ package org.elasticsearch.test; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -53,7 +54,7 @@ public class StreamsUtils { } try (BytesStreamOutput out = new BytesStreamOutput()) { Streams.copy(is, out); - return out.bytes().toBytes(); + return BytesReference.toBytes(out.bytes()); } } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java b/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java index 5525baf4206..d09c763322c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/VersionUtils.java @@ -44,7 +44,7 @@ public class VersionUtils { try { Version object = (Version) field.get(null); ids.add(object.id); - } catch (Throwable e) { + } catch (IllegalAccessException e) { throw new RuntimeException(e); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/client/NoOpClient.java b/test/framework/src/main/java/org/elasticsearch/test/client/NoOpClient.java index 6ff45608700..1d91b0980e4 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/client/NoOpClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/client/NoOpClient.java @@ -50,8 +50,8 @@ public class NoOpClient extends AbstractClient { public void close() { try { ThreadPool.terminate(threadPool(), 10, TimeUnit.SECONDS); - } catch (Throwable t) { - throw new ElasticsearchException(t.getMessage(), t); + } catch (Exception e) { + throw new ElasticsearchException(e.getMessage(), e); } } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java b/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java index 7c832118ca0..67d7b99171d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java +++ b/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java @@ -21,6 +21,7 @@ package org.elasticsearch.test.discovery; import com.carrotsearch.randomizedtesting.RandomizedTest; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.network.NetworkUtils; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; @@ -116,7 +117,7 @@ public class ClusterDiscoveryConfiguration extends NodeConfigurationSource { // we need to pin the node port & host so we'd know where to point things builder.put(TransportSettings.PORT.getKey(), unicastHostPorts[nodeOrdinal]); builder.put(TransportSettings.HOST.getKey(), IP_ADDR); // only bind on one IF we use v4 here by default - builder.put("http.enabled", false); + builder.put(NetworkModule.HTTP_ENABLED.getKey(), false); for (int i = 0; i < unicastHostOrdinals.length; i++) { unicastHosts[i] = IP_ADDR + ":" + (unicastHostPorts[unicastHostOrdinals[i]]); } @@ -126,7 +127,7 @@ public class ClusterDiscoveryConfiguration extends NodeConfigurationSource { } @SuppressForbidden(reason = "we know we pass a IP address") - protected synchronized static int[] unicastHostPorts(int numHosts) { + protected static synchronized int[] 
unicastHostPorts(int numHosts) { int[] unicastHostPorts = new int[numHosts]; final int basePort = calcBasePort(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/disruption/BlockClusterStateProcessing.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/BlockClusterStateProcessing.java index cbcb9766943..956088f0fd1 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/disruption/BlockClusterStateProcessing.java +++ b/test/framework/src/main/java/org/elasticsearch/test/disruption/BlockClusterStateProcessing.java @@ -76,8 +76,8 @@ public class BlockClusterStateProcessing extends SingleNodeDisruption { } @Override - public void onFailure(String source, Throwable t) { - logger.error("unexpected error during disruption", t); + public void onFailure(String source, Exception e) { + logger.error("unexpected error during disruption", e); } }); try { diff --git a/test/framework/src/main/java/org/elasticsearch/test/disruption/BridgePartition.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/BridgePartition.java new file mode 100644 index 00000000000..1a9c2b686c3 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/test/disruption/BridgePartition.java @@ -0,0 +1,74 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.test.disruption; + +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.test.InternalTestCluster; +import org.elasticsearch.test.transport.MockTransportService; + +import java.util.Random; + +import static org.elasticsearch.test.ESTestCase.randomFrom; + +/** + * A partition that breaks the cluster into two groups of nodes. The two groups are fully isolated + * with the exception of a single node that can see and be seen by all nodes in both groups. 
+ */ +public class BridgePartition extends NetworkPartition { + + String bridgeNode; + final boolean unresponsive; + + public BridgePartition(Random random, boolean unresponsive) { + super(random); + this.unresponsive = unresponsive; + } + + @Override + public void applyToCluster(InternalTestCluster cluster) { + bridgeNode = randomFrom(random, cluster.getNodeNames()); + this.cluster = cluster; + for (String node: cluster.getNodeNames()) { + if (node.equals(bridgeNode) == false) { + super.applyToNode(node, cluster); + } + } + } + + @Override + public TimeValue expectedTimeToHeal() { + return TimeValue.timeValueSeconds(0); + } + + @Override + void applyDisruption(MockTransportService transportService1, MockTransportService transportService2) { + if (unresponsive) { + transportService1.addUnresponsiveRule(transportService2); + transportService2.addUnresponsiveRule(transportService1); + } else { + transportService1.addFailToSendNoConnectRule(transportService2); + transportService2.addFailToSendNoConnectRule(transportService1); + } + } + + @Override + protected String getPartitionDescription() { + return "bridge (super connected node: [" + bridgeNode + "], unresponsive [" + unresponsive + "])"; + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/test/disruption/IntermittentLongGCDisruption.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/IntermittentLongGCDisruption.java index d957220c6df..caab35e4b42 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/disruption/IntermittentLongGCDisruption.java +++ b/test/framework/src/main/java/org/elasticsearch/test/disruption/IntermittentLongGCDisruption.java @@ -61,7 +61,7 @@ public class IntermittentLongGCDisruption extends LongGCDisruption { this.delayDurationMax = delayDurationMax; } - final static AtomicInteger thread_ids = new AtomicInteger(); + static final AtomicInteger thread_ids = new AtomicInteger(); @Override public void startDisrupting() { diff --git a/test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruption.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruption.java index 591540e72b9..b210a20cf70 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruption.java +++ b/test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruption.java @@ -33,7 +33,7 @@ import java.util.regex.Pattern; */ public class LongGCDisruption extends SingleNodeDisruption { - private final static Pattern[] unsafeClasses = new Pattern[]{ + private static final Pattern[] unsafeClasses = new Pattern[]{ // logging has shared JVM locks - we may suspend a thread and block other nodes from doing their thing Pattern.compile("Logger") }; diff --git a/test/framework/src/main/java/org/elasticsearch/test/disruption/SlowClusterStateProcessing.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/SlowClusterStateProcessing.java index be0b69a8e8b..f69c0a3085d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/disruption/SlowClusterStateProcessing.java +++ b/test/framework/src/main/java/org/elasticsearch/test/disruption/SlowClusterStateProcessing.java @@ -124,7 +124,7 @@ public class SlowClusterStateProcessing extends SingleNodeDisruption { } @Override - public void onFailure(String source, Throwable t) { + public void onFailure(String source, Exception e) { countDownLatch.countDown(); } }); diff --git a/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java 
b/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java index bf32b6b8575..304e3047496 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java +++ b/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java @@ -172,7 +172,7 @@ public final class MockEngineSupport { return reader; } - public static abstract class DirectoryReaderWrapper extends FilterDirectoryReader { + public abstract static class DirectoryReaderWrapper extends FilterDirectoryReader { protected final SubReaderWrapper subReaderWrapper; public DirectoryReaderWrapper(DirectoryReader in, SubReaderWrapper subReaderWrapper) throws IOException { diff --git a/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java b/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java index 73281b3f6ea..37ed43b9450 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java +++ b/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java @@ -62,18 +62,18 @@ public class ThrowingLeafReaderWrapper extends FilterLeafReader { * A callback interface that allows to throw certain exceptions for * methods called on the IndexReader that is wrapped by {@link ThrowingLeafReaderWrapper} */ - public static interface Thrower { + public interface Thrower { /** * Maybe throws an exception ;) */ - public void maybeThrow(Flags flag) throws IOException; + void maybeThrow(Flags flag) throws IOException; /** * If this method returns true the {@link Terms} instance for the given field * is wrapped with Thrower support otherwise no exception will be thrown for * the current {@link Terms} instance or any other instance obtained from it. 
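With Thrower reduced to a plain interface, tests supply it with a small anonymous class. A hedged sketch; it assumes Flags.Terms is among the Flags constants defined in this file, and the failure rate is illustrative:

```java
import java.io.IOException;
import java.util.Random;

import org.apache.lucene.index.LeafReader;
import org.elasticsearch.test.engine.ThrowingLeafReaderWrapper;
import org.elasticsearch.test.engine.ThrowingLeafReaderWrapper.Flags;
import org.elasticsearch.test.engine.ThrowingLeafReaderWrapper.Thrower;

public class ThrowerSketch {
    static LeafReader wrapWithFailures(LeafReader reader, long seed) {
        Thrower thrower = new Thrower() {
            private final Random random = new Random(seed);

            @Override
            public void maybeThrow(Flags flag) throws IOException {
                // Fail roughly 1% of Terms lookups (assumed constant name).
                if (flag == Flags.Terms && random.nextInt(100) == 0) {
                    throw new IOException("simulated read failure");
                }
            }

            @Override
            public boolean wrapTerms(String field) {
                return field.startsWith("test"); // only wrap selected fields
            }
        };
        return new ThrowingLeafReaderWrapper(reader, thrower);
    }
}
```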
*/ - public boolean wrapTerms(String field); + boolean wrapTerms(String field); } public ThrowingLeafReaderWrapper(LeafReader in, Thrower thrower) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java index 51d15e019a4..db004ca0c3b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java +++ b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java @@ -30,6 +30,7 @@ import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequestBuilder; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.indices.alias.exists.AliasesExistResponse; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequestBuilder; import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse; @@ -76,6 +77,7 @@ import java.util.List; import java.util.Locale; import java.util.Set; +import static java.util.Collections.emptyList; import static org.apache.lucene.util.LuceneTestCase.random; import static org.elasticsearch.test.VersionUtils.randomVersion; import static org.hamcrest.CoreMatchers.equalTo; @@ -95,9 +97,6 @@ import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; -/** - * - */ public class ElasticsearchAssertions { public static void assertAcked(AcknowledgedRequestBuilder builder) { @@ -126,6 +125,17 @@ public class ElasticsearchAssertions { assertVersionSerializable(response); } + /** + * Assert that an index creation was fully acknowledged, meaning that both the index creation cluster + * state update was successful and that the requisite number of shard copies were started before returning. + */ + public static void assertAcked(CreateIndexResponse response) { + assertThat(response.getClass().getSimpleName() + " failed - not acked", response.isAcknowledged(), equalTo(true)); + assertVersionSerializable(response); + assertTrue(response.getClass().getSimpleName() + " failed - index creation acked but not all shards were started", + response.isShardsAcked()); + } + /** * Executes the request and fails if the request has not been blocked. 
* @@ -555,7 +565,6 @@ public class ElasticsearchAssertions { extraInfo += " with status [" + status + "]"; } - try { future.actionGet(); fail = true; @@ -565,7 +574,7 @@ if (status != null) { assertThat(extraInfo, ExceptionsHelper.status(esException), equalTo(status)); } - } catch (Throwable e) { + } catch (Exception e) { assertThat(extraInfo, e, instanceOf(exceptionClass)); if (status != null) { assertThat(extraInfo, ExceptionsHelper.status(e), equalTo(status)); @@ -597,7 +606,7 @@ try { future.actionGet(); fail = true; - } catch (Throwable e) { + } catch (Exception e) { assertThat(extraInfo, ExceptionsHelper.status(e), equalTo(status)); } // has to be outside catch clause to get a proper message @@ -631,7 +640,7 @@ registry = ESIntegTestCase.internalCluster().getInstance(NamedWriteableRegistry.class); } else { registry = new NamedWriteableRegistry(); - new SearchModule(Settings.EMPTY, registry); + new SearchModule(Settings.EMPTY, registry, false, emptyList()); } assertVersionSerializable(version, streamable, registry); } @@ -647,7 +656,7 @@ ((ActionRequest) streamable).validate(); } BytesReference orig = serialize(version, streamable); - StreamInput input = StreamInput.wrap(orig); + StreamInput input = orig.streamInput(); if (namedWriteableRegistry != null) { input = new NamedWriteableAwareStreamInput(input, namedWriteableRegistry); } @@ -655,37 +664,48 @@ newInstance.readFrom(input); assertThat("Stream should be fully read with version [" + version + "] for streamable [" + streamable + "]", input.available(), equalTo(0)); - assertThat("Serialization failed with version [" + version + "] bytes should be equal for streamable [" + streamable + "]", - serialize(version, streamable), equalTo(orig)); - } catch (Throwable ex) { + BytesReference newBytes = serialize(version, streamable); + if (false == orig.equals(newBytes)) { + // The bytes are different. That is a failure. Let's try to throw a useful exception for debugging. + String message = "Serialization failed with version [" + version + "] bytes should be equal for streamable [" + streamable + + "]"; + // If the bytes are different then comparing BytesRef's toStrings will show you *where* they are different + assertEquals(message, orig.toBytesRef().toString(), newBytes.toBytesRef().toString()); + // The bytes aren't different. Very, very weird.
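The new failure path compares BytesRef string renderings because BytesRef.toString() prints the individual byte values, so a string diff pinpoints exactly where two serializations diverge. A self-contained illustration of the idea, using the plain Lucene API rather than code from this change:

```java
import org.apache.lucene.util.BytesRef;

public class BytesRefDiffDemo {
    public static void main(String[] args) {
        BytesRef expected = new BytesRef(new byte[] {1, 2, 3});
        BytesRef actual = new BytesRef(new byte[] {1, 9, 3});
        // BytesRef.toString() renders each byte in hex, e.g. "[1 2 3]",
        // so comparing the strings shows the first differing byte directly.
        System.out.println(expected); // [1 2 3]
        System.out.println(actual);   // [1 9 3]
    }
}
```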
+ fail(message); + } + } catch (Exception ex) { throw new RuntimeException("failed to check serialization - version [" + version + "] for streamable [" + streamable + "]", ex); } } - public static void assertVersionSerializable(Version version, final Throwable t) { - ElasticsearchAssertions.assertVersionSerializable(version, new ThrowableWrapper(t)); + public static void assertVersionSerializable(Version version, final Exception e) { + ElasticsearchAssertions.assertVersionSerializable(version, new ExceptionWrapper(e)); } - public static final class ThrowableWrapper implements Streamable { - Throwable throwable; - public ThrowableWrapper(Throwable t) { - throwable = t; + public static final class ExceptionWrapper implements Streamable { + + private Exception exception; + + public ExceptionWrapper(Exception e) { + exception = e; } - public ThrowableWrapper() { - throwable = null; + public ExceptionWrapper() { + exception = null; } @Override public void readFrom(StreamInput in) throws IOException { - throwable = in.readThrowable(); + exception = in.readException(); } @Override public void writeTo(StreamOutput out) throws IOException { - out.writeThrowable(throwable); + out.writeException(exception); } + } @@ -697,7 +717,7 @@ public class ElasticsearchAssertions { assertThat(constructor, Matchers.notNullValue()); Streamable newInstance = constructor.newInstance(); return newInstance; - } catch (Throwable e) { + } catch (Exception e) { return null; } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java index 2bdec79f3a0..7103cda945b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java +++ b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java @@ -140,8 +140,7 @@ public class ReproduceInfoPrinter extends RunListener { appendProperties("tests.es.logger.level"); if (inVerifyPhase()) { // these properties only make sense for integration tests - appendProperties("tests.es.node.mode", "tests.es.node.local", TESTS_CLUSTER, - ESIntegTestCase.TESTS_ENABLE_MOCK_MODULES); + appendProperties(TESTS_CLUSTER, ESIntegTestCase.TESTS_ENABLE_MOCK_MODULES); } appendProperties("tests.assertion.disabled", "tests.security.manager", "tests.nightly", "tests.jvms", "tests.client.ratio", "tests.heap.size", "tests.bwc", "tests.bwc.version"); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 12e46087b6c..e8895aa90db 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -359,6 +359,9 @@ public abstract class ESRestTestCase extends ESTestCase { //skip test if the whole suite (yaml file) is disabled assumeFalse(buildSkipMessage(testCandidate.getSuitePath(), testCandidate.getSetupSection().getSkipSection()), testCandidate.getSetupSection().getSkipSection().skip(restTestExecutionContext.esVersion())); + //skip test if the suite's teardown section is disabled + assumeFalse(buildSkipMessage(testCandidate.getSuitePath(), testCandidate.getTeardownSection().getSkipSection()), + testCandidate.getTeardownSection().getSkipSection().skip(restTestExecutionContext.esVersion())); //skip test if test section is disabled
assumeFalse(buildSkipMessage(testCandidate.getTestPath(), testCandidate.getTestSection().getSkipSection()), testCandidate.getTestSection().getSkipSection().skip(restTestExecutionContext.esVersion())); @@ -391,8 +394,16 @@ public abstract class ESRestTestCase extends ESTestCase { restTestExecutionContext.clear(); - for (ExecutableSection executableSection : testCandidate.getTestSection().getExecutableSections()) { - executableSection.execute(restTestExecutionContext); + try { + for (ExecutableSection executableSection : testCandidate.getTestSection().getExecutableSections()) { + executableSection.execute(restTestExecutionContext); + } + } finally { + logger.debug("start teardown test [{}]", testCandidate.getTestPath()); + for (DoSection doSection : testCandidate.getTeardownSection().getDoSections()) { + doSection.execute(restTestExecutionContext); + } + logger.debug("end teardown test [{}]", testCandidate.getTestPath()); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/json/JsonPath.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ObjectPath.java similarity index 78% rename from test/framework/src/main/java/org/elasticsearch/test/rest/json/JsonPath.java rename to test/framework/src/main/java/org/elasticsearch/test/rest/ObjectPath.java index b338d76d985..8c492d279b0 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/json/JsonPath.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ObjectPath.java @@ -16,11 +16,10 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.test.rest.json; +package org.elasticsearch.test.rest; +import org.elasticsearch.common.xcontent.XContent; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.test.rest.Stash; import java.io.IOException; import java.util.ArrayList; @@ -28,22 +27,23 @@ import java.util.List; import java.util.Map; /** - * Holds a json object and allows to extract specific values from it + * Holds an object and allows to extract specific values from it given their path */ -public class JsonPath { +public class ObjectPath { - final String json; - final Map jsonMap; + private final Object object; - public JsonPath(String json) throws IOException { - this.json = json; - this.jsonMap = convertToMap(json); + public static ObjectPath createFromXContent(XContent xContent, String input) throws IOException { + try (XContentParser parser = xContent.createParser(input)) { + if (parser.nextToken() == XContentParser.Token.START_ARRAY) { + return new ObjectPath(parser.listOrderedMap()); + } + return new ObjectPath(parser.mapOrdered()); + } } - private static Map convertToMap(String json) throws IOException { - try (XContentParser parser = JsonXContent.jsonXContent.createParser(json)) { - return parser.mapOrdered(); - } + public ObjectPath(Object object) { + this.object = object; } /** @@ -58,7 +58,7 @@ public class JsonPath { */ public Object evaluate(String path, Stash stash) throws IOException { String[] parts = parsePath(path); - Object object = jsonMap; + Object object = this.object; for (String part : parts) { object = evaluate(part, object, stash); if (object == null) { @@ -70,8 +70,8 @@ public class JsonPath { @SuppressWarnings("unchecked") private Object evaluate(String key, Object object, Stash stash) throws IOException { - if (stash.isStashedValue(key)) { - key = stash.unstashValue(key).toString(); + if 
(stash.containsStashedValue(key)) { + key = stash.getValue(key).toString(); } if (object instanceof Map) { @@ -84,7 +84,8 @@ public class JsonPath { } catch (NumberFormatException e) { throw new IllegalArgumentException("element was a list, but [" + key + "] was not numeric", e); } catch (IndexOutOfBoundsException e) { - throw new IllegalArgumentException("element was a list with " + list.size() + " elements, but [" + key + "] was out of bounds", e); + throw new IllegalArgumentException("element was a list with " + list.size() + + " elements, but [" + key + "] was out of bounds", e); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestCandidate.java b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestCandidate.java index e454c396a3d..57c7e1b1305 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestCandidate.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestCandidate.java @@ -20,6 +20,7 @@ package org.elasticsearch.test.rest; import org.elasticsearch.test.rest.section.RestTestSuite; import org.elasticsearch.test.rest.section.SetupSection; +import org.elasticsearch.test.rest.section.TeardownSection; import org.elasticsearch.test.rest.section.TestSection; /** @@ -56,6 +57,10 @@ public class RestTestCandidate { return restTestSuite.getSetupSection(); } + public TeardownSection getTeardownSection() { + return restTestSuite.getTeardownSection(); + } + public TestSection getTestSection() { return testSection; } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java index 34397f03d94..d7295e1dca7 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestExecutionContext.java @@ -66,8 +66,8 @@ public class RestTestExecutionContext implements Closeable { //makes a copy of the parameters before modifying them for this specific request HashMap requestParams = new HashMap<>(params); for (Map.Entry entry : requestParams.entrySet()) { - if (stash.isStashedValue(entry.getValue())) { - entry.setValue(stash.unstashValue(entry.getValue()).toString()); + if (stash.containsStashedValue(entry.getValue())) { + entry.setValue(stash.getValue(entry.getValue()).toString()); } } @@ -76,7 +76,7 @@ public class RestTestExecutionContext implements Closeable { try { response = callApiInternal(apiName, requestParams, body, headers); //we always stash the last response body - stash.stashResponse(response); + stash.stashValue("body", response.getBody()); return response; } catch(ResponseException e) { response = new RestTestResponse(e); @@ -90,12 +90,12 @@ public class RestTestExecutionContext implements Closeable { } if (bodies.size() == 1) { - return bodyAsString(stash.unstashMap(bodies.get(0))); + return bodyAsString(stash.replaceStashedValues(bodies.get(0))); } StringBuilder bodyBuilder = new StringBuilder(); for (Map body : bodies) { - bodyBuilder.append(bodyAsString(stash.unstashMap(body))).append("\n"); + bodyBuilder.append(bodyAsString(stash.replaceStashedValues(body))).append("\n"); } return bodyBuilder.toString(); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/Stash.java b/test/framework/src/main/java/org/elasticsearch/test/rest/Stash.java index 885df395c2b..f687f2b39bf 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/Stash.java +++ 
b/test/framework/src/main/java/org/elasticsearch/test/rest/Stash.java @@ -24,25 +24,27 @@ import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.test.rest.client.RestTestResponse; import java.io.IOException; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; /** * Allows caching the last obtained test response and/or parts of it within variables * that can be used as input values in following requests and assertions. */ public class Stash implements ToXContent { + private static final Pattern EXTENDED_KEY = Pattern.compile("\\$\\{([^}]+)\\}"); private static final ESLogger logger = Loggers.getLogger(Stash.class); public static final Stash EMPTY = new Stash(); private final Map stash = new HashMap<>(); - private RestTestResponse response; + private final ObjectPath stashObjectPath = new ObjectPath(stash); /** * Allows saving a specific field in the stash as a key-value pair @@ -55,12 +57,6 @@ public class Stash implements ToXContent { } } - public void stashResponse(RestTestResponse response) throws IOException { - // TODO we can almost certainly save time by lazily evaluating the body - stashValue("body", response.getBody()); - this.response = response; - } - /** * Clears the previously stashed values */ @@ -69,41 +65,63 @@ } /** - * Tells whether a particular value needs to be looked up in the stash + * Tells whether a particular key needs to be looked up in the stash based on its name. + * Returns true if the string representation of the key starts with "$" or contains an embedded "${...}" expression, false otherwise. * The stash contains fields possibly extracted from previous responses that can be reused * as arguments for following requests (e.g. scroll_id) */ - public boolean isStashedValue(Object key) { - if (key == null) { + public boolean containsStashedValue(Object key) { + if (key == null || false == key instanceof CharSequence) { return false; } String stashKey = key.toString(); - return Strings.hasLength(stashKey) && stashKey.startsWith("$"); + if (false == Strings.hasLength(stashKey)) { + return false; + } + if (stashKey.startsWith("$")) { + return true; + } + return EXTENDED_KEY.matcher(stashKey).find(); } /** - * Extracts a value from the current stash + * Retrieves a value from the current stash. * The stash contains fields possibly extracted from previous responses that can be reused * as arguments for following requests (e.g. scroll_id) */ - public Object unstashValue(String value) throws IOException { - if (value.startsWith("$body.")) { - if (response == null) { - return null; - } - return response.evaluate(value.substring("$body".length()), this); + public Object getValue(String key) throws IOException { + if (key.charAt(0) == '$' && key.charAt(1) != '{') { + return unstash(key.substring(1)); } - Object stashedValue = stash.get(value.substring(1)); + Matcher matcher = EXTENDED_KEY.matcher(key); + /* + * String*Buffer* because that is what the Matcher API takes. In modern versions of Java the uncontended synchronization is very, + * very cheap so that should not be a problem.
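+ * As an illustrative example (hypothetical stash contents, not part of the original change): with "host" stashed as + * "localhost" and "port" stashed as "9200", the append/replace loop below turns a key like "http://${host}:${port}" + * into "http://localhost:9200"; a plain key such as "$body.took" never reaches this loop because it is resolved + * directly through unstash(...) above.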
+ */ + StringBuffer result = new StringBuffer(key.length()); + if (false == matcher.find()) { + throw new IllegalArgumentException("Doesn't contain any stash keys [" + key + "]"); + } + do { + matcher.appendReplacement(result, Matcher.quoteReplacement(unstash(matcher.group(1)).toString())); + } while (matcher.find()); + matcher.appendTail(result); + return result.toString(); + } + + private Object unstash(String key) throws IOException { + Object stashedValue = stashObjectPath.evaluate(key); if (stashedValue == null) { - throw new IllegalArgumentException("stashed value not found for key [" + value + "]"); + throw new IllegalArgumentException("stashed value not found for key [" + key + "]"); } return stashedValue; } /** - * Recursively unstashes map values if needed + * Recursively goes through each map entry and replaces any string value starting with "$" (or containing a "${...}" + * expression) with its corresponding value retrieved from the stash */ - public Map unstashMap(Map map) throws IOException { + public Map replaceStashedValues(Map map) throws IOException { Map copy = new HashMap<>(map); unstashObject(copy); return copy; @@ -115,8 +133,8 @@ List list = (List) obj; for (int i = 0; i < list.size(); i++) { Object o = list.get(i); - if (isStashedValue(o)) { - list.set(i, unstashValue(o.toString())); + if (containsStashedValue(o)) { + list.set(i, getValue(o.toString())); } else { unstashObject(o); } @@ -125,8 +143,8 @@ if (obj instanceof Map) { Map map = (Map) obj; for (Map.Entry entry : map.entrySet()) { - if (isStashedValue(entry.getValue())) { - entry.setValue(unstashValue(entry.getValue().toString())); + if (containsStashedValue(entry.getValue())) { + entry.setValue(getValue(entry.getValue().toString())); } else { unstashObject(entry.getValue()); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestClient.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestClient.java index 6879e02747d..c97ca7cd2fd 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestClient.java @@ -22,16 +22,8 @@ import com.carrotsearch.randomizedtesting.RandomizedTest; import org.apache.http.Header; import org.apache.http.HttpEntity; import org.apache.http.HttpHost; -import org.apache.http.client.config.RequestConfig; -import org.apache.http.config.Registry; -import org.apache.http.config.RegistryBuilder; -import org.apache.http.conn.socket.ConnectionSocketFactory; -import org.apache.http.conn.socket.PlainConnectionSocketFactory; import org.apache.http.conn.ssl.SSLConnectionSocketFactory; import org.apache.http.entity.StringEntity; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.HttpClientBuilder; -import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; import org.apache.http.message.BasicHeader; import org.apache.http.ssl.SSLContexts; import org.apache.lucene.util.IOUtils; @@ -39,6 +31,7 @@ import org.elasticsearch.Version; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; +import org.elasticsearch.client.SSLSocketFactoryHttpConfigCallback; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.logging.ESLogger; @@ -82,11 +75,6 @@ public class RestTestClient implements 
Closeable { public static final String TRUSTSTORE_PATH = "truststore.path"; public static final String TRUSTSTORE_PASSWORD = "truststore.password"; - public static final int CONNECT_TIMEOUT_MILLIS = 1000; - public static final int SOCKET_TIMEOUT_MILLIS = 30000; - public static final int MAX_RETRY_TIMEOUT_MILLIS = SOCKET_TIMEOUT_MILLIS; - public static final int CONNECTION_REQUEST_TIMEOUT_MILLIS = 500; - private static final ESLogger logger = Loggers.getLogger(RestTestClient.class); //query_string params that don't need to be declared in the spec, they are supported by default private static final Set ALWAYS_ACCEPTED_QUERY_STRING_PARAMS = Sets.newHashSet("pretty", "source", "filter_path"); @@ -113,7 +101,7 @@ public class RestTestClient implements Closeable { //we don't really use the urls here; we rely on the client doing round-robin to touch all the nodes in the cluster String method = restApi.getMethods().get(0); String endpoint = restApi.getPaths().get(0); - Response response = restClient.performRequest(method, endpoint, Collections.emptyMap(), null); + Response response = restClient.performRequest(method, endpoint); RestTestResponse restTestResponse = new RestTestResponse(response); Object latestVersion = restTestResponse.evaluate("version.number"); if (latestVersion == null) { @@ -274,7 +262,15 @@ public class RestTestClient implements Closeable { } private static RestClient createRestClient(URL[] urls, Settings settings) throws IOException { - SSLConnectionSocketFactory sslsf; + String protocol = settings.get(PROTOCOL, "http"); + HttpHost[] hosts = new HttpHost[urls.length]; + for (int i = 0; i < hosts.length; i++) { + URL url = urls[i]; + hosts[i] = new HttpHost(url.getHost(), url.getPort(), protocol); + } + RestClient.Builder builder = RestClient.builder(hosts).setMaxRetryTimeoutMillis(30000) + .setRequestConfigCallback(requestConfigBuilder -> requestConfigBuilder.setSocketTimeout(30000)); + String keystorePath = settings.get(TRUSTSTORE_PATH); if (keystorePath != null) { final String keystorePass = settings.get(TRUSTSTORE_PASSWORD); @@ -291,38 +287,13 @@ public class RestTestClient implements Closeable { keyStore.load(is, keystorePass.toCharArray()); } SSLContext sslcontext = SSLContexts.custom().loadTrustMaterial(keyStore, null).build(); - sslsf = new SSLConnectionSocketFactory(sslcontext); + SSLConnectionSocketFactory sslConnectionSocketFactory = new SSLConnectionSocketFactory(sslcontext); + builder.setHttpClientConfigCallback(new SSLSocketFactoryHttpConfigCallback(sslConnectionSocketFactory)); } catch (KeyStoreException|NoSuchAlgorithmException|KeyManagementException|CertificateException e) { throw new RuntimeException(e); } - } else { - sslsf = SSLConnectionSocketFactory.getSocketFactory(); } - Registry socketFactoryRegistry = RegistryBuilder.create() - .register("http", PlainConnectionSocketFactory.getSocketFactory()) - .register("https", sslsf) - .build(); - PoolingHttpClientConnectionManager connectionManager = new PoolingHttpClientConnectionManager(socketFactoryRegistry); - //default settings may be too constraining - connectionManager.setDefaultMaxPerRoute(10); - connectionManager.setMaxTotal(30); - - //default timeouts are all infinite - RequestConfig requestConfig = RequestConfig.custom().setConnectTimeout(CONNECT_TIMEOUT_MILLIS) - .setSocketTimeout(SOCKET_TIMEOUT_MILLIS) - .setConnectionRequestTimeout(CONNECTION_REQUEST_TIMEOUT_MILLIS).build(); - CloseableHttpClient httpClient = HttpClientBuilder.create() - 
.setConnectionManager(connectionManager).setDefaultRequestConfig(requestConfig).build(); - - String protocol = settings.get(PROTOCOL, "http"); - HttpHost[] hosts = new HttpHost[urls.length]; - for (int i = 0; i < hosts.length; i++) { - URL url = urls[i]; - hosts[i] = new HttpHost(url.getHost(), url.getPort(), protocol); - } - - RestClient.Builder builder = RestClient.builder(hosts).setHttpClient(httpClient).setMaxRetryTimeoutMillis(MAX_RETRY_TIMEOUT_MILLIS); try (ThreadContext threadContext = new ThreadContext(settings)) { Header[] defaultHeaders = new Header[threadContext.getHeaders().size()]; int i = 0; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestResponse.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestResponse.java index 5b5773d6fdc..4644b87b8e7 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestResponse.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestTestResponse.java @@ -23,23 +23,24 @@ import org.apache.http.util.EntityUtils; import org.apache.lucene.util.IOUtils; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.rest.ObjectPath; import org.elasticsearch.test.rest.Stash; -import org.elasticsearch.test.rest.json.JsonPath; import java.io.IOException; import java.nio.charset.StandardCharsets; /** * Response obtained from a REST call, eagerly reads the response body into a string for later optional parsing. - * Supports parsing the response body as json when needed and returning specific values extracted from it. + * Supports parsing the response body when needed and returning specific values extracted from it. */ public class RestTestResponse { private final Response response; private final String body; - private JsonPath parsedResponse; + private ObjectPath parsedResponse; - public RestTestResponse(Response response) { + public RestTestResponse(Response response) throws IOException { this.response = response; if (response.getEntity() != null) { try { @@ -53,11 +54,24 @@ public class RestTestResponse { } else { this.body = null; } + parseResponseBody(); } - public RestTestResponse(ResponseException responseException) { + public RestTestResponse(ResponseException responseException) throws IOException { this.response = responseException.getResponse(); this.body = responseException.getResponseBody(); + parseResponseBody(); + } + + private void parseResponseBody() throws IOException { + if (body != null) { + String contentType = response.getHeader("Content-Type"); + XContentType xContentType = XContentType.fromMediaTypeOrFormat(contentType); + //skip parsing if we got text back (e.g. if we called _cat apis) + if (xContentType == XContentType.JSON || xContentType == XContentType.YAML) { + this.parsedResponse = ObjectPath.createFromXContent(xContentType.xContent(), body); + } + } } public int getStatusCode() { @@ -73,11 +87,7 @@ public class RestTestResponse { * Might be a string or a json object parsed as a map. 
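* For example (illustrative values, not from the original code): a JSON body such as {"took": 5} is returned as a * Map containing the key "took", while a plain text body, e.g. the output of a _cat API, is returned as the raw String.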
*/ public Object getBody() throws IOException { - if (isJson()) { - JsonPath parsedResponse = parsedResponse(); - if (parsedResponse == null) { - return null; - } + if (parsedResponse != null) { return parsedResponse.evaluate(""); } return body; @@ -95,23 +105,21 @@ } /** - * Parses the response body as json and extracts a specific value from it (identified by the provided path) + * Parses the response body and extracts a specific value from it (identified by the provided path) */ public Object evaluate(String path) throws IOException { return evaluate(path, Stash.EMPTY); } /** - * Parses the response body as json and extracts a specific value from it (identified by the provided path) + * Parses the response body and extracts a specific value from it (identified by the provided path) */ public Object evaluate(String path, Stash stash) throws IOException { if (response == null) { return null; } - JsonPath jsonPath = parsedResponse(); - - if (jsonPath == null) { + if (parsedResponse == null) { //special case: apis that don't support a body (e.g. exists) return true if 200, false if 404, even if no body //is_true: '' means the response had no body but the client returned true (caused by 200) //is_false: '' means the response had no body but the client returned false (caused by 404) @@ -121,21 +129,6 @@ return null; } - return jsonPath.evaluate(path, stash); - } - - private boolean isJson() { - String contentType = response.getHeader("Content-Type"); - return contentType != null && contentType.contains("application/json"); - } - - private JsonPath parsedResponse() throws IOException { - if (parsedResponse != null) { - return parsedResponse; - } - if (response == null || body == null) { - return null; - } - return parsedResponse = new JsonPath(body); + return parsedResponse.evaluate(path, stash); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanEqualToParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanEqualToParser.java index 68f833d35c7..7a4cd0f316a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanEqualToParser.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanEqualToParser.java @@ -33,7 +33,8 @@ public class GreaterThanEqualToParser implements RestTestFragmentParser stringObjectTuple = parseContext.parseTuple(); if (! (stringObjectTuple.v2() instanceof Comparable) ) { - throw new RestTestParseException("gte section can only be used with objects that support natural ordering, found " + stringObjectTuple.v2().getClass().getSimpleName()); + throw new RestTestParseException("gte section can only be used with objects that support natural ordering, found " + + stringObjectTuple.v2().getClass().getSimpleName()); } return new GreaterThanEqualToAssertion(stringObjectTuple.v1(), stringObjectTuple.v2()); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanParser.java index a66122138c1..7e1ca1ece7f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanParser.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/GreaterThanParser.java @@ -32,7 +32,8 @@ public class GreaterThanParser implements RestTestFragmentParser stringObjectTuple = parseContext.parseTuple(); if (! 
(stringObjectTuple.v2() instanceof Comparable) ) { - throw new RestTestParseException("gt section can only be used with objects that support natural ordering, found " + stringObjectTuple.v2().getClass().getSimpleName()); + throw new RestTestParseException("gt section can only be used with objects that support natural ordering, found " + + stringObjectTuple.v2().getClass().getSimpleName()); } return new GreaterThanAssertion(stringObjectTuple.v1(), stringObjectTuple.v2()); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanOrEqualToParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanOrEqualToParser.java index f2d53d05a56..a30979c6a3c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanOrEqualToParser.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanOrEqualToParser.java @@ -33,7 +33,8 @@ public class LessThanOrEqualToParser implements RestTestFragmentParser stringObjectTuple = parseContext.parseTuple(); if (! (stringObjectTuple.v2() instanceof Comparable) ) { - throw new RestTestParseException("lte section can only be used with objects that support natural ordering, found " + stringObjectTuple.v2().getClass().getSimpleName()); + throw new RestTestParseException("lte section can only be used with objects that support natural ordering, found " + + stringObjectTuple.v2().getClass().getSimpleName()); } return new LessThanOrEqualToAssertion(stringObjectTuple.v1(), stringObjectTuple.v2()); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanParser.java index 065dd19d6a1..fc31f221758 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanParser.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/LessThanParser.java @@ -32,7 +32,8 @@ public class LessThanParser implements RestTestFragmentParser public LessThanAssertion parse(RestTestSuiteParseContext parseContext) throws IOException, RestTestParseException { Tuple stringObjectTuple = parseContext.parseTuple(); if (! 
(stringObjectTuple.v2() instanceof Comparable) ) { - throw new RestTestParseException("lt section can only be used with objects that support natural ordering, found " + stringObjectTuple.v2().getClass().getSimpleName()); + throw new RestTestParseException("lt section can only be used with objects that support natural ordering, found " + + stringObjectTuple.v2().getClass().getSimpleName()); } return new LessThanAssertion(stringObjectTuple.v1(), stringObjectTuple.v2()); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParseContext.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParseContext.java index 0a0c2722020..f7325443deb 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParseContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParseContext.java @@ -18,20 +18,21 @@ */ package org.elasticsearch.test.rest.parser; -import java.io.IOException; -import java.util.HashMap; -import java.util.Map; - import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.xcontent.XContentLocation; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.test.rest.section.DoSection; import org.elasticsearch.test.rest.section.ExecutableSection; -import org.elasticsearch.test.rest.section.ResponseBodyAssertion; import org.elasticsearch.test.rest.section.SetupSection; import org.elasticsearch.test.rest.section.SkipSection; +import org.elasticsearch.test.rest.section.TeardownSection; import org.elasticsearch.test.rest.section.TestSection; +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + /** * Context shared across the whole test parse phase. * Provides shared parse methods and holds information needed to parse the test sections (e.g. 
es version) @@ -39,6 +40,7 @@ import org.elasticsearch.test.rest.section.TestSection; public class RestTestSuiteParseContext { private static final SetupSectionParser SETUP_SECTION_PARSER = new SetupSectionParser(); + private static final TeardownSectionParser TEARDOWN_SECTION_PARSER = new TeardownSectionParser(); private static final RestTestSectionParser TEST_SECTION_PARSER = new RestTestSectionParser(); private static final SkipSectionParser SKIP_SECTION_PARSER = new SkipSectionParser(); private static final DoSectionParser DO_SECTION_PARSER = new DoSectionParser(); @@ -54,7 +56,6 @@ public class RestTestSuiteParseContext { EXECUTABLE_SECTIONS_PARSERS.put("lt", new LessThanParser()); EXECUTABLE_SECTIONS_PARSERS.put("lte", new LessThanOrEqualToParser()); EXECUTABLE_SECTIONS_PARSERS.put("length", new LengthParser()); - EXECUTABLE_SECTIONS_PARSERS.put("response_body", ResponseBodyAssertion.PARSER); } private final String api; @@ -93,6 +94,19 @@ public class RestTestSuiteParseContext { return SetupSection.EMPTY; } + public TeardownSection parseTeardownSection() throws IOException, RestTestParseException { + advanceToFieldName(); + + if ("teardown".equals(parser.currentName())) { + parser.nextToken(); + TeardownSection teardownSection = TEARDOWN_SECTION_PARSER.parse(this); + parser.nextToken(); + return teardownSection; + } + + return TeardownSection.EMPTY; + } + public TestSection parseTestSection() throws IOException, RestTestParseException { return TEST_SECTION_PARSER.parse(this); } @@ -144,7 +158,8 @@ public class RestTestSuiteParseContext { token = parser.nextToken(); } if (token != XContentParser.Token.FIELD_NAME) { - throw new RestTestParseException("malformed test section: field name expected but found " + token + " at " + parser.getTokenLocation()); + throw new RestTestParseException("malformed test section: field name expected but found " + token + " at " + + parser.getTokenLocation()); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParser.java b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParser.java index d3f93939c2e..f22f0109594 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParser.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/parser/RestTestSuiteParser.java @@ -21,6 +21,7 @@ package org.elasticsearch.test.rest.parser; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.yaml.YamlXContent; import org.elasticsearch.test.rest.section.RestTestSuite; +import org.elasticsearch.test.rest.section.TeardownSection; import org.elasticsearch.test.rest.section.TestSection; import java.io.IOException; @@ -70,11 +71,13 @@ public class RestTestSuiteParser implements RestTestFragmentParser { + + @Override + public TeardownSection parse(RestTestSuiteParseContext parseContext) throws IOException, RestTestParseException { + XContentParser parser = parseContext.parser(); + + TeardownSection teardownSection = new TeardownSection(); + teardownSection.setSkipSection(parseContext.parseSkipSection()); + + while (parser.currentToken() != XContentParser.Token.END_ARRAY) { + parseContext.advanceToFieldName(); + if (!"do".equals(parser.currentName())) { + throw new RestTestParseException("section [" + parser.currentName() + "] not supported within teardown section"); + } + + parser.nextToken(); + teardownSection.addDoSection(parseContext.parseDoSection()); + parser.nextToken(); + } + + parser.nextToken(); + return 
teardownSection; + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/Assertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/Assertion.java index c420309f206..fbba9de163b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/Assertion.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/section/Assertion.java @@ -48,18 +48,18 @@ public abstract class Assertion implements ExecutableSection { if (expectedValue instanceof Map) { @SuppressWarnings("unchecked") Map map = (Map) expectedValue; - return executionContext.stash().unstashMap(map); + return executionContext.stash().replaceStashedValues(map); } - if (executionContext.stash().isStashedValue(expectedValue)) { - return executionContext.stash().unstashValue(expectedValue.toString()); + if (executionContext.stash().containsStashedValue(expectedValue)) { + return executionContext.stash().getValue(expectedValue.toString()); } return expectedValue; } protected final Object getActualValue(RestTestExecutionContext executionContext) throws IOException { - if (executionContext.stash().isStashedValue(field)) { - return executionContext.stash().unstashValue(field); + if (executionContext.stash().containsStashedValue(field)) { + return executionContext.stash().getValue(field); } return executionContext.response(field); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanAssertion.java index ade7fbd59ca..63f69696653 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanAssertion.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanAssertion.java @@ -43,8 +43,10 @@ public class GreaterThanAssertion extends Assertion { @SuppressWarnings("unchecked") protected void doAssert(Object actualValue, Object expectedValue) { logger.trace("assert that [{}] is greater than [{}] (field: [{}])", actualValue, expectedValue, getField()); - assertThat("value of [" + getField() + "] is not comparable (got [" + safeClass(actualValue) + "])", actualValue, instanceOf(Comparable.class)); - assertThat("expected value of [" + getField() + "] is not comparable (got [" + expectedValue.getClass() + "])", expectedValue, instanceOf(Comparable.class)); + assertThat("value of [" + getField() + "] is not comparable (got [" + safeClass(actualValue) + "])", + actualValue, instanceOf(Comparable.class)); + assertThat("expected value of [" + getField() + "] is not comparable (got [" + expectedValue.getClass() + "])", + expectedValue, instanceOf(Comparable.class)); try { assertThat(errorMessage(), (Comparable) actualValue, greaterThan((Comparable) expectedValue)); } catch (ClassCastException e) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanEqualToAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanEqualToAssertion.java index cfdca7bc338..297eecf2d2a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanEqualToAssertion.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/section/GreaterThanEqualToAssertion.java @@ -43,8 +43,10 @@ public class GreaterThanEqualToAssertion extends Assertion { @Override protected void doAssert(Object actualValue, Object expectedValue) { logger.trace("assert that [{}] is greater than or equal to [{}] (field: [{}])", actualValue, 
expectedValue, getField()); - assertThat("value of [" + getField() + "] is not comparable (got [" + safeClass(actualValue) + "])", actualValue, instanceOf(Comparable.class)); - assertThat("expected value of [" + getField() + "] is not comparable (got [" + expectedValue.getClass() + "])", expectedValue, instanceOf(Comparable.class)); + assertThat("value of [" + getField() + "] is not comparable (got [" + safeClass(actualValue) + "])", + actualValue, instanceOf(Comparable.class)); + assertThat("expected value of [" + getField() + "] is not comparable (got [" + expectedValue.getClass() + "])", + expectedValue, instanceOf(Comparable.class)); try { assertThat(errorMessage(), (Comparable) actualValue, greaterThanOrEqualTo((Comparable) expectedValue)); } catch (ClassCastException e) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/LengthAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/LengthAssertion.java index 265487a0388..eb28ba01a94 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/LengthAssertion.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/section/LengthAssertion.java @@ -44,7 +44,8 @@ public class LengthAssertion extends Assertion { @Override protected void doAssert(Object actualValue, Object expectedValue) { logger.trace("assert that [{}] has length [{}] (field: [{}])", actualValue, expectedValue, getField()); - assertThat("expected value of [" + getField() + "] is not numeric (got [" + expectedValue.getClass() + "]", expectedValue, instanceOf(Number.class)); + assertThat("expected value of [" + getField() + "] is not numeric (got [" + expectedValue.getClass() + "])", + expectedValue, instanceOf(Number.class)); int length = ((Number) expectedValue).intValue(); if (actualValue instanceof String) { assertThat(errorMessage(), ((String) actualValue).length(), equalTo(length)); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/LessThanAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/LessThanAssertion.java index 89387ff8952..153a7824569 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/LessThanAssertion.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/section/LessThanAssertion.java @@ -44,8 +44,10 @@ public class LessThanAssertion extends Assertion { @SuppressWarnings("unchecked") protected void doAssert(Object actualValue, Object expectedValue) { logger.trace("assert that [{}] is less than [{}] (field: [{}])", actualValue, expectedValue, getField()); - assertThat("value of [" + getField() + "] is not comparable (got [" + safeClass(actualValue) + "])", actualValue, instanceOf(Comparable.class)); - assertThat("expected value of [" + getField() + "] is not comparable (got [" + expectedValue.getClass() + "])", expectedValue, instanceOf(Comparable.class)); + assertThat("value of [" + getField() + "] is not comparable (got [" + safeClass(actualValue) + "])", + actualValue, instanceOf(Comparable.class)); + assertThat("expected value of [" + getField() + "] is not comparable (got [" + expectedValue.getClass() + "])", + expectedValue, instanceOf(Comparable.class)); try { assertThat(errorMessage(), (Comparable) actualValue, lessThan((Comparable) expectedValue)); } catch (ClassCastException e) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/LessThanOrEqualToAssertion.java 
b/test/framework/src/main/java/org/elasticsearch/test/rest/section/LessThanOrEqualToAssertion.java index 99cbf1155d5..1eb3a9fc2b2 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/LessThanOrEqualToAssertion.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/section/LessThanOrEqualToAssertion.java @@ -43,8 +43,10 @@ public class LessThanOrEqualToAssertion extends Assertion { @Override protected void doAssert(Object actualValue, Object expectedValue) { logger.trace("assert that [{}] is less than or equal to [{}] (field: [{}])", actualValue, expectedValue, getField()); - assertThat("value of [" + getField() + "] is not comparable (got [" + safeClass(actualValue) + "])", actualValue, instanceOf(Comparable.class)); - assertThat("expected value of [" + getField() + "] is not comparable (got [" + expectedValue.getClass() + "])", expectedValue, instanceOf(Comparable.class)); + assertThat("value of [" + getField() + "] is not comparable (got [" + safeClass(actualValue) + "])", + actualValue, instanceOf(Comparable.class)); + assertThat("expected value of [" + getField() + "] is not comparable (got [" + expectedValue.getClass() + "])", + expectedValue, instanceOf(Comparable.class)); try { assertThat(errorMessage(), (Comparable) actualValue, lessThanOrEqualTo((Comparable) expectedValue)); } catch (ClassCastException e) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/MatchAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/MatchAssertion.java index e00fbbea01c..3a96d4532a0 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/MatchAssertion.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/section/MatchAssertion.java @@ -18,15 +18,21 @@ */ package org.elasticsearch.test.rest.section; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Objects; +import java.util.TreeMap; import java.util.regex.Pattern; import static org.elasticsearch.test.hamcrest.RegexMatcher.matches; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.notNullValue; +import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertThat; /** @@ -45,12 +51,12 @@ public class MatchAssertion extends Assertion { @Override protected void doAssert(Object actualValue, Object expectedValue) { - //if the value is wrapped into / it is a regexp (e.g. 
/s+d+/) if (expectedValue instanceof String) { String expValue = ((String) expectedValue).trim(); if (expValue.length() > 2 && expValue.startsWith("/") && expValue.endsWith("/")) { - assertThat("field [" + getField() + "] was expected to be of type String but is an instanceof [" + safeClass(actualValue) + "]", actualValue, instanceOf(String.class)); + assertThat("field [" + getField() + "] was expected to be of type String but is an instance of [" + + safeClass(actualValue) + "]", actualValue, instanceOf(String.class)); String stringValue = (String) actualValue; String regex = expValue.substring(1, expValue.length() - 1); logger.trace("assert that [{}] matches [{}]", stringValue, regex); @@ -60,20 +66,131 @@ } } - assertThat(errorMessage(), actualValue, notNullValue()); + assertNotNull("field [" + getField() + "] is null", actualValue); logger.trace("assert that [{}] matches [{}] (field [{}])", actualValue, expectedValue, getField()); - if (!actualValue.getClass().equals(safeClass(expectedValue))) { + if (actualValue.getClass().equals(safeClass(expectedValue)) == false) { if (actualValue instanceof Number && expectedValue instanceof Number) { //Double 1.0 is equal to Integer 1 - assertThat(errorMessage(), ((Number) actualValue).doubleValue(), equalTo(((Number) expectedValue).doubleValue())); + assertThat("field [" + getField() + "] doesn't match the expected value", + ((Number) actualValue).doubleValue(), equalTo(((Number) expectedValue).doubleValue())); return; } } - assertThat(errorMessage(), actualValue, equalTo(expectedValue)); + if (expectedValue.equals(actualValue) == false) { + FailureMessage message = new FailureMessage(getField()); + message.compare(getField(), actualValue, expectedValue); + throw new AssertionError(message.message); + } } - private String errorMessage() { - return "field [" + getField() + "] doesn't match the expected value"; + private static class FailureMessage { + private final StringBuilder message; + private int indent = 0; + + private FailureMessage(String field) { + this.message = new StringBuilder(field + " didn't match the expected value:\n"); + } + + private void compareMaps(Map actual, Map expected) { + actual = new TreeMap<>(actual); + expected = new TreeMap<>(expected); + for (Map.Entry expectedEntry : expected.entrySet()) { + compare(expectedEntry.getKey(), actual.remove(expectedEntry.getKey()), expectedEntry.getValue()); + } + for (Map.Entry unmatchedEntry : actual.entrySet()) { + field(unmatchedEntry.getKey(), "unexpected but found [" + unmatchedEntry.getValue() + "]"); + } + } + + private void compareLists(List actual, List expected) { + int i = 0; + while (i < actual.size() && i < expected.size()) { + compare(Integer.toString(i), actual.get(i), expected.get(i)); + i++; + } + if (actual.size() == expected.size()) { + return; + } + indent(); + if (actual.size() < expected.size()) { + message.append("expected [").append(expected.size() - i).append("] more entries\n"); + return; + } + message.append("received [").append(actual.size() - i).append("] more entries than expected\n"); + } + + private void compare(String field, @Nullable Object actual, Object expected) { + if (expected instanceof Map) { + if (actual == null) { + field(field, "expected map but not found"); + return; + } + if (false == actual instanceof Map) { + field(field, "expected map but found [" + actual + "]"); + return; + } + @SuppressWarnings("unchecked") + Map expectedMap = (Map) expected; + @SuppressWarnings("unchecked") + Map 
actualMap = (Map) actual; + if (expectedMap.isEmpty() && actualMap.isEmpty()) { + field(field, "same [empty map]"); + return; + } + field(field, null); + indent += 1; + compareMaps(actualMap, expectedMap); + indent -= 1; + return; + } + if (expected instanceof List) { + if (actual == null) { + field(field, "expected list but not found"); + return; + } + if (false == actual instanceof List) { + field(field, "expected list but found [" + actual + "]"); + return; + } + @SuppressWarnings("unchecked") + List expectedList = (List) expected; + @SuppressWarnings("unchecked") + List actualList = (List) actual; + if (expectedList.isEmpty() && actualList.isEmpty()) { + field(field, "same [empty list]"); + return; + } + field(field, null); + indent += 1; + compareLists(actualList, expectedList); + indent -= 1; + return; + } + if (actual == null) { + field(field, "expected [" + expected + "] but not found"); + return; + } + if (Objects.equals(expected, actual)) { + field(field, "same [" + expected + "]"); + return; + } + field(field, "expected [" + expected + "] but was [" + actual + "]"); + } + + private void indent() { + for (int i = 0; i < indent; i++) { + message.append(" "); + } + } + + private void field(Object name, String info) { + indent(); + message.append(String.format(Locale.ROOT, "%30s: ", name)); + if (info != null) { + message.append(info); + } + message.append('\n'); + } } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/ResponseBodyAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/ResponseBodyAssertion.java deleted file mode 100644 index 3ead65a2111..00000000000 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/ResponseBodyAssertion.java +++ /dev/null @@ -1,170 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.test.rest.section; - -import java.io.IOException; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.Objects; -import java.util.TreeMap; - -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.test.rest.parser.RestTestFragmentParser; -import org.elasticsearch.test.rest.parser.RestTestParseException; -import org.elasticsearch.test.rest.parser.RestTestSuiteParseContext; - -/** - * Checks that the response body matches some text. 
- */ -public class ResponseBodyAssertion extends Assertion { - public static final RestTestFragmentParser PARSER = new RestTestFragmentParser() { - @Override - public ResponseBodyAssertion parse(RestTestSuiteParseContext parseContext) throws IOException, RestTestParseException { - try (XContentParser parser = JsonXContent.jsonXContent.createParser(parseContext.parseField())) { - return new ResponseBodyAssertion("$body", parser.map()); - } - } - }; - - private ResponseBodyAssertion(String field, Map expectedValue) { - super(field, expectedValue); - } - - @Override - protected void doAssert(Object actualValue, Object expectedValue) { - if (false == expectedValue.equals(actualValue)) { - @SuppressWarnings("unchecked") - Map actual = (Map) actualValue; - @SuppressWarnings("unchecked") - Map expected = (Map) expectedValue; - FailureMessage message = new FailureMessage(); - message.compareMaps(actual, expected); - throw new AssertionError(message.message); - } - } - - private class FailureMessage { - private final StringBuilder message = new StringBuilder("body didn't match the expected value:\n"); - private int indent = 0; - - private void compareMaps(Map actual, Map expected) { - actual = new TreeMap<>(actual); - expected = new TreeMap<>(expected); - for (Map.Entry expectedEntry : expected.entrySet()) { - compare(expectedEntry.getKey(), actual.remove(expectedEntry.getKey()), expectedEntry.getValue()); - } - for (Map.Entry unmatchedEntry : actual.entrySet()) { - field(unmatchedEntry.getKey(), "unexpected but found [" + unmatchedEntry.getValue() + "]"); - } - } - - private void compareLists(List actual, List expected) { - int i = 0; - while (i < actual.size() && i < expected.size()) { - compare(i, actual.get(i), expected.get(i)); - i++; - } - if (actual.size() == expected.size()) { - return; - } - indent(); - if (actual.size() < expected.size()) { - message.append("expected [").append(expected.size() - i).append("] more entries\n"); - return; - } - message.append("received [").append(actual.size() - i).append("] more entries than expected\n"); - } - - private void compare(Object field, @Nullable Object actual, Object expected) { - if (expected instanceof Map) { - if (actual == null) { - field(field, "expected map but not found"); - return; - } - if (false == actual instanceof Map) { - field(field, "expected map but found [" + actual + "]"); - return; - } - @SuppressWarnings("unchecked") - Map expectedMap = (Map) expected; - @SuppressWarnings("unchecked") - Map actualMap = (Map) actual; - if (expectedMap.isEmpty() && actualMap.isEmpty()) { - field(field, "same [empty map]"); - return; - } - field(field, null); - indent += 1; - compareMaps(actualMap, expectedMap); - indent -= 1; - return; - } - if (expected instanceof List) { - if (actual == null) { - field(field, "expected list but not found"); - return; - } - if (false == actual instanceof List) { - field(field, "expected list but found [" + actual + "]"); - return; - } - @SuppressWarnings("unchecked") - List expectedList = (List) expected; - @SuppressWarnings("unchecked") - List actualList = (List) actual; - if (expectedList.isEmpty() && actualList.isEmpty()) { - field(field, "same [empty list]"); - return; - } - field(field, null); - indent += 1; - compareLists(actualList, expectedList); - indent -= 1; - return; - } - if (actual == null) { - field(field, "expected [" + expected + "] but not found"); - return; - } - if (Objects.equals(expected, actual)) { - field(field, "same [" + expected + "]"); - return; - } - field(field, "expected [" + 
expected + "] but was [" + actual + "]"); - } - - private void indent() { - for (int i = 0; i < indent; i++) { - message.append(" "); - } - } - - private void field(Object name, String info) { - indent(); - message.append(String.format(Locale.ROOT, "%30s: ", name)); - if (info != null) { - message.append(info); - } - message.append('\n'); - } - } -} diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/RestTestSuite.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/RestTestSuite.java index d53671bc6bc..5c093be3fa0 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/section/RestTestSuite.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/section/RestTestSuite.java @@ -33,6 +33,7 @@ public class RestTestSuite { private final String name; private SetupSection setupSection; + private TeardownSection teardownSection; private Set testSections = new TreeSet<>(); @@ -61,6 +62,14 @@ public class RestTestSuite { this.setupSection = setupSection; } + public TeardownSection getTeardownSection() { + return teardownSection; + } + + public void setTeardownSection(TeardownSection teardownSection) { + this.teardownSection = teardownSection; + } + /** * Adds a {@link org.elasticsearch.test.rest.section.TestSection} to the REST suite * @return true if the test section was not already present, false otherwise diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/section/TeardownSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/section/TeardownSection.java new file mode 100644 index 00000000000..b3709472be5 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/section/TeardownSection.java @@ -0,0 +1,56 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.test.rest.section; + +import java.util.ArrayList; +import java.util.List; + +public class TeardownSection { + + public static final TeardownSection EMPTY; + + static { + EMPTY = new TeardownSection(); + EMPTY.setSkipSection(SkipSection.EMPTY); + } + + private SkipSection skipSection; + private List doSections = new ArrayList<>(); + + public SkipSection getSkipSection() { + return skipSection; + } + + public void setSkipSection(SkipSection skipSection) { + this.skipSection = skipSection; + } + + public List getDoSections() { + return doSections; + } + + public void addDoSection(DoSection doSection) { + this.doSections.add(doSection); + } + + public boolean isEmpty() { + return EMPTY.equals(this); + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestSpec.java b/test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestSpec.java index 106ff5176c7..c6ea48fd6ef 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestSpec.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/spec/RestSpec.java @@ -75,7 +75,7 @@ public class RestSpec { } restSpec.addApi(restApi); } - } catch (Throwable ex) { + } catch (Exception ex) { throw new IOException("Can't parse rest spec file: [" + jsonFile + "]", ex); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/support/Features.java b/test/framework/src/main/java/org/elasticsearch/test/rest/support/Features.java index 66ba1528b90..3c78d432f19 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/support/Features.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/support/Features.java @@ -34,7 +34,8 @@ import java.util.List; */ public final class Features { - private static final List SUPPORTED = Arrays.asList("stash_in_path", "groovy_scripting", "headers", "yaml"); + private static final List SUPPORTED = + Arrays.asList("stash_in_path", "groovy_scripting", "headers", "embedded_stash_key", "yaml"); private Features() { diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/support/FileUtils.java b/test/framework/src/main/java/org/elasticsearch/test/rest/support/FileUtils.java index 69acae55fdc..b32308f8cd8 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/support/FileUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/support/FileUtils.java @@ -78,7 +78,8 @@ public final class FileUtils { * Each input path can either be a single file (the .yaml suffix is optional) or a directory. * Each path is looked up in the classpath, or optionally from {@code fileSystem} if it's not null. */ - public static Map> findYamlSuites(FileSystem fileSystem, String optionalPathPrefix, final String... paths) throws IOException { + public static Map> findYamlSuites(FileSystem fileSystem, String optionalPathPrefix, final String... 
paths) + throws IOException { Map> yamlSuites = new HashMap<>(); for (String path : paths) { collectFiles(resolveFile(fileSystem, optionalPathPrefix, path, YAML_SUFFIX), YAML_SUFFIX, yamlSuites); @@ -86,7 +87,8 @@ return yamlSuites; } - private static Path resolveFile(FileSystem fileSystem, String optionalPathPrefix, String path, String optionalFileSuffix) throws IOException { + private static Path resolveFile(FileSystem fileSystem, String optionalPathPrefix, String path, String optionalFileSuffix) + throws IOException { if (fileSystem != null) { Path file = findFile(fileSystem, path, optionalFileSuffix); if (!lenientExists(file)) { @@ -94,7 +96,8 @@ String newPath = optionalPathPrefix + "/" + path; file = findFile(fileSystem, newPath, optionalFileSuffix); if (!lenientExists(file)) { - throw new NoSuchFileException("path prefix: " + optionalPathPrefix + ", path: " + path + ", file suffix: " + optionalFileSuffix); + throw new NoSuchFileException("path prefix: " + optionalPathPrefix + ", path: " + path + ", file suffix: " + + optionalFileSuffix); } } return file; diff --git a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java index 7b1c3fd936b..1b99d2f32cb 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/store/MockFSDirectoryService.java @@ -131,11 +131,11 @@ public class MockFSDirectoryService extends FsDirectoryService { ESTestCase.checkIndexFailed = true; logger.warn("check index [failure] index files={}\n{}", Arrays.toString(dir.listAll()), - new String(os.bytes().toBytes(), StandardCharsets.UTF_8)); + os.bytes().utf8ToString()); throw new IOException("index check failure"); } else { if (logger.isDebugEnabled()) { - logger.debug("check index [success]\n{}", new String(os.bytes().toBytes(), StandardCharsets.UTF_8)); + logger.debug("check index [success]\n{}", os.bytes().utf8ToString()); } } } catch (LockObtainFailedException e) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java b/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java index b0d16d10c49..ec695e8bd41 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java +++ b/test/framework/src/main/java/org/elasticsearch/test/tasks/MockTaskManager.java @@ -50,8 +50,8 @@ public class MockTaskManager extends TaskManager { for (MockTaskManagerListener listener : listeners) { try { listener.onTaskRegistered(task); - } catch (Throwable t) { - logger.warn("failed to notify task manager listener about unregistering the task with id {}", t, task.getId()); + } catch (Exception e) { + logger.warn("failed to notify task manager listener about registering the task with id {}", e, task.getId()); } } } @@ -65,8 +65,8 @@ public class MockTaskManager extends TaskManager { for (MockTaskManagerListener listener : listeners) { try { listener.onTaskUnregistered(task); - } catch (Throwable t) { - logger.warn("failed to notify task manager listener about unregistering the task with id {}", t, task.getId()); + } catch (Exception e) { + logger.warn("failed to notify task manager listener about unregistering the task with id {}", e, task.getId()); } } } else { @@ -80,8 +80,8 @@ for (MockTaskManagerListener listener : 
listeners) { try { listener.waitForTaskCompletion(task); - } catch (Throwable t) { - logger.warn("failed to notify task manager listener about waitForTaskCompletion the task with id {}", t, task.getId()); + } catch (Exception e) { + logger.warn("failed to notify task manager listener about waitForTaskCompletion of the task with id {}", e, task.getId()); } } super.waitForTaskCompletion(task, untilInNanos); diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java b/test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java index 0142acf8c17..2724f53857d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/AssertingLocalTransport.java @@ -47,13 +47,11 @@ import java.util.Random; public class AssertingLocalTransport extends LocalTransport { + public static final String ASSERTING_TRANSPORT_NAME = "asserting_local"; + public static class TestPlugin extends Plugin { public void onModule(NetworkModule module) { - module.registerTransport("mock", AssertingLocalTransport.class); - } - @Override - public Settings additionalSettings() { - return Settings.builder().put(NetworkModule.TRANSPORT_TYPE_KEY, "mock").build(); + module.registerTransport(ASSERTING_TRANSPORT_NAME, AssertingLocalTransport.class); } @Override diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java b/test/framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java index 654a1c971ca..4ff899aeac2 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java @@ -19,10 +19,13 @@ package org.elasticsearch.test.transport; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.Randomness; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.component.LifecycleListener; +import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; @@ -46,16 +49,18 @@ import java.util.concurrent.BlockingQueue; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; +import static org.apache.lucene.util.LuceneTestCase.rarely; + /** A transport class that doesn't send anything but rather captures all requests for inspection from tests */ public class CapturingTransport implements Transport { private TransportServiceAdapter adapter; - static public class CapturedRequest { - final public DiscoveryNode node; - final public long requestId; - final public String action; - final public TransportRequest request; + public static class CapturedRequest { + public final DiscoveryNode node; + public final long requestId; + public final String action; + public final TransportRequest request; public CapturedRequest(DiscoveryNode node, long requestId, String action, TransportRequest request) { this.node = node; @@ -150,7 +155,18 @@ public class CapturingTransport implements Transport { * @param t the failure to wrap */ public void handleRemoteError(final long requestId, final Throwable t) { - 
this.handleError(requestId, new RemoteTransportException("remote failure", t)); + final RemoteTransportException remoteException; + if (rarely(Randomness.get())) { + remoteException = new RemoteTransportException("remote failure, coming from local node", t); + } else { + try (BytesStreamOutput output = new BytesStreamOutput()) { + output.writeException(t); + remoteException = new RemoteTransportException("remote failure", output.bytes().streamInput().readException()); + } catch (IOException ioException) { + throw new ElasticsearchException("failed to serialize/deserialize supplied exception " + t, ioException); + } + } + this.handleError(requestId, remoteException); } /** @@ -242,19 +258,13 @@ public class CapturingTransport implements Transport { } @Override - public Transport start() { - return null; - } + public void start() {} @Override - public Transport stop() { - return null; - } + public void stop() {} @Override - public void close() { - - } + public void close() {} @Override public List getLocalAddresses() { diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java index 7688e2842e6..d7f806f27b0 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java @@ -20,7 +20,6 @@ package org.elasticsearch.test.transport; import org.elasticsearch.Version; -import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.transport.TransportService; @@ -30,15 +29,12 @@ import org.elasticsearch.common.component.LifecycleListener; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; @@ -52,10 +48,8 @@ import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportRequestOptions; -import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.TransportServiceAdapter; import org.elasticsearch.transport.local.LocalTransport; -import org.elasticsearch.transport.netty.NettyTransport; import java.io.IOException; import java.util.Arrays; @@ -105,21 +99,6 @@ public class MockTransportService extends TransportService { return new MockTransportService(settings, transport, threadPool); } - public static MockTransportService nettyFromThreadPool( - Settings settings, - ThreadPool threadPool, final Version version) { - NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(); - Transport transport = new 
NettyTransport(settings, threadPool, new NetworkService(settings), BigArrays.NON_RECYCLING_INSTANCE, - namedWriteableRegistry, new NoneCircuitBreakerService()) { - @Override - protected Version getCurrentVersion() { - return version; - } - }; - return new MockTransportService(Settings.EMPTY, transport, threadPool); - } - - private final Transport original; @Inject @@ -383,11 +362,11 @@ public class MockTransportService extends TransportService { BytesStreamOutput bStream = new BytesStreamOutput(); request.writeTo(bStream); final TransportRequest clonedRequest = reg.newRequest(); - clonedRequest.readFrom(StreamInput.wrap(bStream.bytes())); + clonedRequest.readFrom(bStream.bytes().streamInput()); threadPool.schedule(delay, ThreadPool.Names.GENERIC, new AbstractRunnable() { @Override - public void onFailure(Throwable e) { + public void onFailure(Exception e) { logger.debug("failed to send delayed request", e); } @@ -558,15 +537,13 @@ public class MockTransportService extends TransportService { } @Override - public Transport start() { + public void start() { transport.start(); - return this; } @Override - public Transport stop() { + public void stop() { transport.stop(); - return this; } @Override @@ -641,10 +618,10 @@ public class MockTransportService extends TransportService { } @Override - protected void traceResponseSent(long requestId, String action, Throwable t) { - super.traceResponseSent(requestId, action, t); + protected void traceResponseSent(long requestId, String action, Exception e) { + super.traceResponseSent(requestId, action, e); for (Tracer tracer : activeTracers) { - tracer.responseSent(requestId, action, t); + tracer.responseSent(requestId, action, e); } } diff --git a/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java similarity index 72% rename from core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java rename to test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index 9ae029a4aa4..42275c75e5a 100644 --- a/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -19,8 +19,8 @@ package org.elasticsearch.transport; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; -import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -37,11 +37,16 @@ import org.junit.After; import org.junit.Before; import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import static java.util.Collections.emptyMap; @@ -51,9 +56,6 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.notNullValue; -/** - * - */ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { protected ThreadPool threadPool; @@ -62,7 +64,7 @@ 
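The rename above moves AbstractSimpleTransportTestCase out of core and into the test framework, so any Transport implementation can reuse the whole suite. A minimal sketch of such reuse, not part of this patch: it assumes the suite's abstract build(Settings, Version) factory that setUp below relies on, and wires in the MockTcpTransport added later in this change; the class name is illustrative.

    // hypothetical subclass; the constructor arguments mirror code shown elsewhere in this diff
    public class SimpleMockTcpTransportTests extends AbstractSimpleTransportTestCase {
        @Override
        protected MockTransportService build(Settings settings, Version version) {
            Transport transport = new MockTcpTransport(settings, threadPool, BigArrays.NON_RECYCLING_INSTANCE,
                new NoneCircuitBreakerService(), new NamedWriteableRegistry(), new NetworkService(settings), version);
            MockTransportService service = new MockTransportService(settings, transport, threadPool);
            service.start(); // assumption: the suite expects a started service; setUp then calls acceptIncomingRequests()
            return service;
        }
    }
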
public abstract class AbstractSimpleTransportTestCase extends ESTestCase { protected DiscoveryNode nodeA; protected MockTransportService serviceA; - protected static final Version version1 = Version.fromId(Version.CURRENT.id+1); + protected static final Version version1 = Version.fromId(Version.CURRENT.id + 1); protected DiscoveryNode nodeB; protected MockTransportService serviceB; @@ -74,24 +76,25 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { super.setUp(); threadPool = new TestThreadPool(getClass().getName()); serviceA = build( - Settings.builder() - .put("name", "TS_A") - .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "") - .put(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING") - .build(), - version0); + Settings.builder() + .put("name", "TS_A") + .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "") + .put(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING") + .build(), + version0); serviceA.acceptIncomingRequests(); nodeA = new DiscoveryNode("TS_A", serviceA.boundAddress().publishAddress(), emptyMap(), emptySet(), version0); + // serviceA.setLocalNode(nodeA); serviceB = build( - Settings.builder() - .put("name", "TS_B") - .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "") - .put(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING") - .build(), - version1); + Settings.builder() + .put("name", "TS_B") + .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "") + .put(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING") + .build(), + version1); serviceB.acceptIncomingRequests(); nodeB = new DiscoveryNode("TS_B", serviceB.boundAddress().publishAddress(), emptyMap(), emptySet(), version1); - + //serviceB.setLocalNode(nodeB); // wait till all nodes are properly connected and the event has been sent, so tests in this class // will not get this callback called on the connections done in this setup final boolean useLocalNode = randomBoolean(); @@ -140,41 +143,41 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { public void testHelloWorld() { serviceA.registerRequestHandler("sayHello", StringMessageRequest::new, ThreadPool.Names.GENERIC, new TransportRequestHandler() { - @Override - public void messageReceived(StringMessageRequest request, TransportChannel channel) { - assertThat("moshe", equalTo(request.message)); - try { - channel.sendResponse(new StringMessageResponse("hello " + request.message)); - } catch (IOException e) { - logger.error("Unexpected failure", e); - fail(e.getMessage()); + @Override + public void messageReceived(StringMessageRequest request, TransportChannel channel) { + assertThat("moshe", equalTo(request.message)); + try { + channel.sendResponse(new StringMessageResponse("hello " + request.message)); + } catch (IOException e) { + logger.error("Unexpected failure", e); + fail(e.getMessage()); + } } - } - }); + }); TransportFuture res = serviceB.submitRequest(nodeA, "sayHello", - new StringMessageRequest("moshe"), new BaseTransportResponseHandler() { - @Override - public StringMessageResponse newInstance() { - return new StringMessageResponse(); - } + new StringMessageRequest("moshe"), new TransportResponseHandler() { + @Override + public StringMessageResponse newInstance() { + return new StringMessageResponse(); + } - @Override - public String executor() { - return ThreadPool.Names.GENERIC; - } + @Override + public String executor() { + return ThreadPool.Names.GENERIC; + } - @Override - public void handleResponse(StringMessageResponse 
response) { - assertThat("hello moshe", equalTo(response.message)); - } + @Override + public void handleResponse(StringMessageResponse response) { + assertThat("hello moshe", equalTo(response.message)); + } - @Override - public void handleException(TransportException exp) { - logger.error("Unexpected failure", exp); - fail("got exception instead of a response: " + exp.getMessage()); - } - }); + @Override + public void handleException(TransportException exp) { + logger.error("Unexpected failure", exp); + fail("got exception instead of a response: " + exp.getMessage()); + } + }); try { StringMessageResponse message = res.get(); @@ -184,28 +187,28 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { } res = serviceB.submitRequest(nodeA, "sayHello", new StringMessageRequest("moshe"), - TransportRequestOptions.builder().withCompress(true).build(), new BaseTransportResponseHandler() { - @Override - public StringMessageResponse newInstance() { - return new StringMessageResponse(); - } + TransportRequestOptions.builder().withCompress(true).build(), new TransportResponseHandler() { + @Override + public StringMessageResponse newInstance() { + return new StringMessageResponse(); + } - @Override - public String executor() { - return ThreadPool.Names.GENERIC; - } + @Override + public String executor() { + return ThreadPool.Names.GENERIC; + } - @Override - public void handleResponse(StringMessageResponse response) { - assertThat("hello moshe", equalTo(response.message)); - } + @Override + public void handleResponse(StringMessageResponse response) { + assertThat("hello moshe", equalTo(response.message)); + } - @Override - public void handleException(TransportException exp) { - logger.error("Unexpected failure", exp); - fail("got exception instead of a response: " + exp.getMessage()); - } - }); + @Override + public void handleException(TransportException exp) { + logger.error("Unexpected failure", exp); + fail("got exception instead of a response: " + exp.getMessage()); + } + }); try { StringMessageResponse message = res.get(); @@ -233,7 +236,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { }); final Object context = new Object(); final String executor = randomFrom(ThreadPool.THREAD_POOL_TYPES.keySet().toArray(new String[0])); - TransportResponseHandler responseHandler = new BaseTransportResponseHandler() { + TransportResponseHandler responseHandler = new TransportResponseHandler() { @Override public StringMessageResponse newInstance() { return new StringMessageResponse(); @@ -335,8 +338,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { }); TransportFuture res = serviceB.submitRequest(nodeA, "sayHello", - TransportRequest.Empty.INSTANCE, TransportRequestOptions.builder().withCompress(true).build(), - new BaseTransportResponseHandler() { + TransportRequest.Empty.INSTANCE, TransportRequestOptions.builder().withCompress(true).build(), + new TransportResponseHandler() { @Override public TransportResponse.Empty newInstance() { return TransportResponse.Empty.INSTANCE; @@ -382,11 +385,11 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { fail(e.getMessage()); } } - }); + }); TransportFuture res = serviceB.submitRequest(nodeA, "sayHello", - new StringMessageRequest("moshe"), TransportRequestOptions.builder().withCompress(true).build(), - new BaseTransportResponseHandler() { + new StringMessageRequest("moshe"), TransportRequestOptions.builder().withCompress(true).build(), + new 
TransportResponseHandler() { @Override public StringMessageResponse newInstance() { return new StringMessageResponse(); @@ -427,30 +430,30 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { assertThat("moshe", equalTo(request.message)); throw new RuntimeException("bad message !!!"); } - }); + }); TransportFuture res = serviceB.submitRequest(nodeA, "sayHelloException", - new StringMessageRequest("moshe"), new BaseTransportResponseHandler() { - @Override - public StringMessageResponse newInstance() { - return new StringMessageResponse(); - } + new StringMessageRequest("moshe"), new TransportResponseHandler() { + @Override + public StringMessageResponse newInstance() { + return new StringMessageResponse(); + } - @Override - public String executor() { - return ThreadPool.Names.GENERIC; - } + @Override + public String executor() { + return ThreadPool.Names.GENERIC; + } - @Override - public void handleResponse(StringMessageResponse response) { - fail("got response instead of exception"); - } + @Override + public void handleResponse(StringMessageResponse response) { + fail("got response instead of exception"); + } - @Override - public void handleException(TransportException exp) { - assertThat("runtime_exception: bad message !!!", equalTo(exp.getCause().getMessage())); - } - }); + @Override + public void handleException(TransportException exp) { + assertThat("runtime_exception: bad message !!!", equalTo(exp.getCause().getMessage())); + } + }); try { res.txGet(); @@ -497,7 +500,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { } }); TransportFuture foobar = serviceB.submitRequest(nodeA, "foobar", - new StringMessageRequest(""), TransportRequestOptions.EMPTY, EmptyTransportResponseHandler.INSTANCE_SAME); + new StringMessageRequest(""), TransportRequestOptions.EMPTY, EmptyTransportResponseHandler.INSTANCE_SAME); latch2.countDown(); try { foobar.txGet(); @@ -519,8 +522,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { }); TransportFuture res = serviceB.submitRequest(nodeA, "sayHelloTimeoutNoResponse", - new StringMessageRequest("moshe"), TransportRequestOptions.builder().withTimeout(100).build(), - new BaseTransportResponseHandler() { + new StringMessageRequest("moshe"), TransportRequestOptions.builder().withTimeout(100).build(), + new TransportResponseHandler() { @Override public StringMessageResponse newInstance() { return new StringMessageResponse(); @@ -553,29 +556,40 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { } public void testTimeoutSendExceptionWithDelayedResponse() throws Exception { - CountDownLatch doneLatch = new CountDownLatch(1); + CountDownLatch waitForever = new CountDownLatch(1); + CountDownLatch doneWaitingForever = new CountDownLatch(1); + Semaphore inFlight = new Semaphore(Integer.MAX_VALUE); serviceA.registerRequestHandler("sayHelloTimeoutDelayedResponse", StringMessageRequest::new, ThreadPool.Names.GENERIC, new TransportRequestHandler() { @Override - public void messageReceived(StringMessageRequest request, TransportChannel channel) { - TimeValue sleep = TimeValue.parseTimeValue(request.message, null, "sleep"); + public void messageReceived(StringMessageRequest request, TransportChannel channel) throws InterruptedException { + String message = request.message; + inFlight.acquireUninterruptibly(); try { - doneLatch.await(sleep.millis(), TimeUnit.MILLISECONDS); - } catch (InterruptedException e) { - // ignore - } - try { - channel.sendResponse(new 
StringMessageResponse("hello " + request.message)); - } catch (IOException e) { - logger.error("Unexpected failure", e); - fail(e.getMessage()); + if ("forever".equals(message)) { + waitForever.await(); + } else { + TimeValue sleep = TimeValue.parseTimeValue(message, null, "sleep"); + Thread.sleep(sleep.millis()); + } + try { + channel.sendResponse(new StringMessageResponse("hello " + request.message)); + } catch (IOException e) { + logger.error("Unexpected failure", e); + fail(e.getMessage()); + } + } finally { + inFlight.release(); + if ("forever".equals(message)) { + doneWaitingForever.countDown(); + } } } - }); + }); final CountDownLatch latch = new CountDownLatch(1); TransportFuture res = serviceB.submitRequest(nodeA, "sayHelloTimeoutDelayedResponse", - new StringMessageRequest("2m"), TransportRequestOptions.builder().withTimeout(100).build(), - new BaseTransportResponseHandler() { + new StringMessageRequest("forever"), TransportRequestOptions.builder().withTimeout(100).build(), + new TransportResponseHandler() { @Override public StringMessageResponse newInstance() { return new StringMessageResponse(); @@ -600,19 +614,20 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { }); try { - StringMessageResponse message = res.txGet(); + res.txGet(); fail("exception should be thrown"); } catch (Exception e) { assertThat(e, instanceOf(ReceiveTimeoutTransportException.class)); } latch.await(); + List assertions = new ArrayList<>(); for (int i = 0; i < 10; i++) { final int counter = i; // now, try and send another request, this times, with a short timeout - res = serviceB.submitRequest(nodeA, "sayHelloTimeoutDelayedResponse", - new StringMessageRequest(counter + "ms"), TransportRequestOptions.builder().withTimeout(3000).build(), - new BaseTransportResponseHandler() { + TransportFuture result = serviceB.submitRequest(nodeA, "sayHelloTimeoutDelayedResponse", + new StringMessageRequest(counter + "ms"), TransportRequestOptions.builder().withTimeout(3000).build(), + new TransportResponseHandler() { @Override public StringMessageResponse newInstance() { return new StringMessageResponse(); @@ -635,12 +650,18 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { } }); - StringMessageResponse message = res.txGet(); - assertThat(message.message, equalTo("hello " + counter + "ms")); + assertions.add(() -> { + StringMessageResponse message = result.txGet(); + assertThat(message.message, equalTo("hello " + counter + "ms")); + }); + } + for (Runnable runnable : assertions) { + runnable.run(); } - serviceA.removeHandler("sayHelloTimeoutDelayedResponse"); - doneLatch.countDown(); + waitForever.countDown(); + doneWaitingForever.await(); + assertTrue(inFlight.tryAcquire(Integer.MAX_VALUE, 10, TimeUnit.SECONDS)); } @TestLogging(value = "test. transport.tracer:TRACE") @@ -664,7 +685,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { }; final Semaphore requestCompleted = new Semaphore(0); - TransportResponseHandler noopResponseHandler = new BaseTransportResponseHandler() { + TransportResponseHandler noopResponseHandler = new TransportResponseHandler() { @Override public StringMessageResponse newInstance() { @@ -698,7 +719,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { tracer.reset(4); boolean timeout = randomBoolean(); - TransportRequestOptions options = timeout ? TransportRequestOptions.builder().withTimeout(1).build(): TransportRequestOptions.EMPTY; + TransportRequestOptions options = timeout ? 
TransportRequestOptions.builder().withTimeout(1).build() : + TransportRequestOptions.EMPTY; serviceA.sendRequest(nodeB, "test", new StringMessageRequest("", 10), options, noopResponseHandler); requestCompleted.acquire(); tracer.expectedEvents.get().await(); @@ -967,7 +989,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { Version0Request version0Request = new Version0Request(); version0Request.value1 = 1; Version0Response version0Response = serviceA.submitRequest(nodeB, "/version", version0Request, - new BaseTransportResponseHandler() { + new TransportResponseHandler() { @Override public Version0Response newInstance() { return new Version0Response(); @@ -1009,7 +1031,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { version1Request.value1 = 1; version1Request.value2 = 2; Version1Response version1Response = serviceB.submitRequest(nodeA, "/version", version1Request, - new BaseTransportResponseHandler() { + new TransportResponseHandler() { @Override public Version1Response newInstance() { return new Version1Response(); @@ -1055,7 +1077,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { version1Request.value1 = 1; version1Request.value2 = 2; Version1Response version1Response = serviceB.submitRequest(nodeB, "/version", version1Request, - new BaseTransportResponseHandler() { + new TransportResponseHandler() { @Override public Version1Response newInstance() { return new Version1Response(); @@ -1085,20 +1107,17 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { public void testVersionFrom0to0() throws Exception { serviceA.registerRequestHandler("/version", Version0Request::new, ThreadPool.Names.SAME, - new TransportRequestHandler() { - @Override - public void messageReceived(Version0Request request, TransportChannel channel) throws Exception { - assertThat(request.value1, equalTo(1)); - Version0Response response = new Version0Response(); - response.value1 = 1; - channel.sendResponse(response); - } + (request, channel) -> { + assertThat(request.value1, equalTo(1)); + Version0Response response = new Version0Response(); + response.value1 = 1; + channel.sendResponse(response); }); Version0Request version0Request = new Version0Request(); version0Request.value1 = 1; Version0Response version0Response = serviceA.submitRequest(nodeA, "/version", version0Request, - new BaseTransportResponseHandler() { + new TransportResponseHandler() { @Override public Version0Response newInstance() { return new Version0Response(); @@ -1137,27 +1156,27 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { serviceB.addFailToSendNoConnectRule(serviceA); TransportFuture res = serviceB.submitRequest(nodeA, "sayHello", - new StringMessageRequest("moshe"), new BaseTransportResponseHandler() { - @Override - public StringMessageResponse newInstance() { - return new StringMessageResponse(); - } + new StringMessageRequest("moshe"), new TransportResponseHandler() { + @Override + public StringMessageResponse newInstance() { + return new StringMessageResponse(); + } - @Override - public String executor() { - return ThreadPool.Names.GENERIC; - } + @Override + public String executor() { + return ThreadPool.Names.GENERIC; + } - @Override - public void handleResponse(StringMessageResponse response) { - fail("got response instead of exception"); - } + @Override + public void handleResponse(StringMessageResponse response) { + fail("got response instead of exception"); + } - @Override - public void 
handleException(TransportException exp) { - assertThat(exp.getCause().getMessage(), endsWith("DISCONNECT: simulated")); - } - }); + @Override + public void handleException(TransportException exp) { + assertThat(exp.getCause().getMessage(), endsWith("DISCONNECT: simulated")); + } + }); try { res.txGet(); @@ -1196,8 +1215,8 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { serviceB.addUnresponsiveRule(serviceA); TransportFuture res = serviceB.submitRequest(nodeA, "sayHello", - new StringMessageRequest("moshe"), TransportRequestOptions.builder().withTimeout(100).build(), - new BaseTransportResponseHandler() { + new StringMessageRequest("moshe"), TransportRequestOptions.builder().withTimeout(100).build(), + new TransportResponseHandler() { @Override public StringMessageResponse newInstance() { return new StringMessageResponse(); @@ -1289,18 +1308,18 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { public void testBlockingIncomingRequests() throws Exception { TransportService service = build( - Settings.builder() - .put("name", "TS_TEST") - .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "") - .put(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING") - .build(), - version0); + Settings.builder() + .put("name", "TS_TEST") + .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "") + .put(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING") + .build(), + version0); AtomicBoolean requestProcessed = new AtomicBoolean(); service.registerRequestHandler("action", TestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { - requestProcessed.set(true); - channel.sendResponse(TransportResponse.Empty.INSTANCE); - }); + (request, channel) -> { + requestProcessed.set(true); + channel.sendResponse(TransportResponse.Empty.INSTANCE); + }); DiscoveryNode node = new DiscoveryNode("TS_TEST", "TS_TEST", service.boundAddress().publishAddress(), emptyMap(), emptySet(), version0); @@ -1340,8 +1359,239 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { } public static class TestRequest extends TransportRequest { + + String info; + int resendCount; + + public TestRequest() { + } + + public TestRequest(String info) { + this.info = info; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + info = in.readOptionalString(); + resendCount = in.readInt(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeOptionalString(info); + out.writeInt(resendCount); + } + + @Override + public String toString() { + return "TestRequest{" + + "info='" + info + '\'' + + '}'; + } } private static class TestResponse extends TransportResponse { + + String info; + + public TestResponse() { + } + + public TestResponse(String info) { + this.info = info; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + info = in.readOptionalString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeOptionalString(info); + } + + @Override + public String toString() { + return "TestResponse{" + + "info='" + info + '\'' + + '}'; + } + } + + public void testSendRandomRequests() throws InterruptedException { + TransportService serviceC = build( + Settings.builder() + .put("name", "TS_TEST") + .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "") + .put(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), 
"NOTHING") + .build(), + version0); + DiscoveryNode nodeC = + new DiscoveryNode("TS_C", "TS_C", serviceC.boundAddress().publishAddress(), emptyMap(), emptySet(), version0); + serviceC.acceptIncomingRequests(); + + final CountDownLatch latch = new CountDownLatch(5); + TransportConnectionListener waitForConnection = new TransportConnectionListener() { + @Override + public void onNodeConnected(DiscoveryNode node) { + latch.countDown(); + } + + @Override + public void onNodeDisconnected(DiscoveryNode node) { + fail("disconnect should not be called " + node); + } + }; + serviceA.addConnectionListener(waitForConnection); + serviceB.addConnectionListener(waitForConnection); + serviceC.addConnectionListener(waitForConnection); + + serviceC.connectToNode(nodeA); + serviceC.connectToNode(nodeB); + serviceA.connectToNode(nodeC); + serviceB.connectToNode(nodeC); + serviceC.connectToNode(nodeC); + + latch.await(); + serviceA.removeConnectionListener(waitForConnection); + serviceB.removeConnectionListener(waitForConnection); + serviceB.removeConnectionListener(waitForConnection); + + + Map toNodeMap = new HashMap<>(); + toNodeMap.put(serviceA, nodeA); + toNodeMap.put(serviceB, nodeB); + toNodeMap.put(serviceC, nodeC); + AtomicBoolean fail = new AtomicBoolean(false); + class TestRequestHandler implements TransportRequestHandler { + + private final TransportService service; + + TestRequestHandler(TransportService service) { + this.service = service; + } + + @Override + public void messageReceived(TestRequest request, TransportChannel channel) throws Exception { + if (randomBoolean()) { + Thread.sleep(randomIntBetween(10, 50)); + } + if (fail.get()) { + throw new IOException("forced failure"); + } + + if (randomBoolean() && request.resendCount++ < 20) { + DiscoveryNode node = randomFrom(nodeA, nodeB, nodeC); + logger.debug("send secondary request from {} to {} - {}", toNodeMap.get(service), node, request.info); + service.sendRequest(node, "action1", new TestRequest("secondary " + request.info), + TransportRequestOptions.builder().withCompress(randomBoolean()).build(), + new TransportResponseHandler() { + @Override + public TestResponse newInstance() { + return new TestResponse(); + } + + @Override + public void handleResponse(TestResponse response) { + try { + if (randomBoolean()) { + Thread.sleep(randomIntBetween(10, 50)); + } + logger.debug("send secondary response {}", response.info); + + channel.sendResponse(response); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + @Override + public void handleException(TransportException exp) { + try { + logger.debug("send secondary exception response for request {}", request.info); + channel.sendResponse(exp); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + @Override + public String executor() { + return randomBoolean() ? 
ThreadPool.Names.SAME : ThreadPool.Names.GENERIC; + } + }); + } else { + logger.debug("send response for {}", request.info); + channel.sendResponse(new TestResponse("Response for: " + request.info)); + } + + } + } + serviceB.registerRequestHandler("action1", TestRequest::new, randomFrom(ThreadPool.Names.SAME, ThreadPool.Names.GENERIC), + new TestRequestHandler(serviceB)); + serviceC.registerRequestHandler("action1", TestRequest::new, randomFrom(ThreadPool.Names.SAME, ThreadPool.Names.GENERIC), + new TestRequestHandler(serviceC)); + serviceA.registerRequestHandler("action1", TestRequest::new, randomFrom(ThreadPool.Names.SAME, ThreadPool.Names.GENERIC), + new TestRequestHandler(serviceA)); + int iters = randomIntBetween(30, 60); + CountDownLatch allRequestsDone = new CountDownLatch(iters); + class TestResponseHandler implements TransportResponseHandler { + + private final int id; + + public TestResponseHandler(int id) { + this.id = id; + } + + @Override + public TestResponse newInstance() { + return new TestResponse(); + } + + @Override + public void handleResponse(TestResponse response) { + logger.debug("---> received response: {}", response.info); + allRequestsDone.countDown(); + } + + @Override + public void handleException(TransportException exp) { + logger.debug("---> received exception for id {}", exp, id); + allRequestsDone.countDown(); + Throwable unwrap = ExceptionsHelper.unwrap(exp, IOException.class); + assertNotNull(unwrap); + assertEquals(IOException.class, unwrap.getClass()); + assertEquals("forced failure", unwrap.getMessage()); + } + + @Override + public String executor() { + return randomBoolean() ? ThreadPool.Names.SAME : ThreadPool.Names.GENERIC; + } + } + + for (int i = 0; i < iters; i++) { + TransportService service = randomFrom(serviceC, serviceB, serviceA); + DiscoveryNode node = randomFrom(nodeC, nodeB, nodeA); + logger.debug("send from {} to {}", toNodeMap.get(service), node); + service.sendRequest(node, "action1", new TestRequest("REQ[" + i + "]"), + TransportRequestOptions.builder().withCompress(randomBoolean()).build(), new TestResponseHandler(i)); + } + logger.debug("waiting for response"); + fail.set(randomBoolean()); + boolean await = allRequestsDone.await(5, TimeUnit.SECONDS); + if (await == false) { + logger.debug("now failing forcefully"); + fail.set(true); + assertTrue(allRequestsDone.await(5, TimeUnit.SECONDS)); + } + logger.debug("DONE"); + serviceC.close(); + } } diff --git a/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java b/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java new file mode 100644 index 00000000000..eb4613f9e12 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java @@ -0,0 +1,353 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.transport; + +import org.apache.lucene.util.IOUtils; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.InputStreamStreamInput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.network.NetworkService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.InetSocketTransportAddress; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.CancellableThreads; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.indices.breaker.CircuitBreakerService; +import org.elasticsearch.threadpool.ThreadPool; + +import java.io.BufferedInputStream; +import java.io.BufferedOutputStream; +import java.io.Closeable; +import java.io.IOException; +import java.io.OutputStream; +import java.net.InetSocketAddress; +import java.net.ServerSocket; +import java.net.Socket; +import java.net.SocketException; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.Executor; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.RejectedExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Consumer; + +/** + * This is a socket based blocking TcpTransport implementation that is used for tests + * that need real networking. This implementation is a test only implementation that implements + * the networking layer in the worst possible way since it blocks and uses a thread per request model. + */ +public class MockTcpTransport extends TcpTransport { + + private final ExecutorService executor; + private final Version mockVersion; + + @Inject + public MockTcpTransport(Settings settings, ThreadPool threadPool, BigArrays bigArrays, + CircuitBreakerService circuitBreakerService, NamedWriteableRegistry namedWriteableRegistry, + NetworkService networkService) { + this(settings, threadPool, bigArrays, circuitBreakerService, namedWriteableRegistry, networkService, + Version.CURRENT); + } + + public MockTcpTransport(Settings settings, ThreadPool threadPool, BigArrays bigArrays, + CircuitBreakerService circuitBreakerService, NamedWriteableRegistry namedWriteableRegistry, + NetworkService networkService, Version mockVersion) { + super("mock-tcp-transport", settings, threadPool, bigArrays, circuitBreakerService, namedWriteableRegistry, networkService); + // we have our own crazy cached threadpool this one is not bounded at all... 
+ // using the ES thread factory here is crucial for tests otherwise disruption tests won't block that thread + executor = Executors.newCachedThreadPool(EsExecutors.daemonThreadFactory(settings, Transports.TEST_MOCK_TRANSPORT_THREAD_PREFIX)); + this.mockVersion = mockVersion; + } + + @Override + protected InetSocketAddress getLocalAddress(MockChannel mockChannel) { + return mockChannel.localAddress; + } + + @Override + protected MockChannel bind(final String name, InetSocketAddress address) throws IOException { + ServerSocket socket = new ServerSocket(); + socket.bind(address); + socket.setReuseAddress(TCP_REUSE_ADDRESS.get(settings())); + ByteSizeValue tcpReceiveBufferSize = TCP_RECEIVE_BUFFER_SIZE.get(settings); + if (tcpReceiveBufferSize.bytes() > 0) { + socket.setReceiveBufferSize(tcpReceiveBufferSize.bytesAsInt()); + } + MockChannel serverMockChannel = new MockChannel(socket, name); + CountDownLatch started = new CountDownLatch(1); + executor.execute(new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + try { + onException(serverMockChannel, e); + } catch (IOException ex) { + logger.warn("failed on handling exception", ex); + } + } + + @Override + protected void doRun() throws Exception { + started.countDown(); + serverMockChannel.accept(executor); + } + }); + try { + started.await(); + } catch (InterruptedException e) { + Thread.interrupted(); + } + return serverMockChannel; + } + + private void readMessage(MockChannel mockChannel, StreamInput input) throws IOException { + Socket socket = mockChannel.activeChannel; + byte[] minimalHeader = new byte[TcpHeader.MARKER_BYTES_SIZE]; + int firstByte = input.read(); + if (firstByte == -1) { + throw new IOException("Connection reset by peer"); + } + minimalHeader[0] = (byte) firstByte; + minimalHeader[1] = (byte) input.read(); + int msgSize = input.readInt(); + if (msgSize == -1) { + socket.getOutputStream().flush(); + } else { + BytesStreamOutput output = new BytesStreamOutput(); + final byte[] buffer = new byte[msgSize]; + input.readFully(buffer); + output.write(minimalHeader); + output.writeInt(msgSize); + output.write(buffer); + BytesReference bytes = output.bytes(); + if (validateMessageHeader(bytes)) { + InetSocketAddress remoteAddress = (InetSocketAddress) socket.getRemoteSocketAddress(); + messageReceived(bytes.slice(TcpHeader.MARKER_BYTES_SIZE + TcpHeader.MESSAGE_LENGTH_SIZE, msgSize), + mockChannel, mockChannel.profile, remoteAddress, msgSize); + } else { + // ping message - we just drop all stuff + } + } + } + + @Override + protected NodeChannels connectToChannelsLight(DiscoveryNode node) throws IOException { + return connectToChannels(node); + } + + @Override + protected NodeChannels connectToChannels(DiscoveryNode node) throws IOException { + final NodeChannels nodeChannels = new NodeChannels(new MockChannel[1], + new MockChannel[1], + new MockChannel[1], + new MockChannel[1], + new MockChannel[1]); + boolean success = false; + final Socket socket = new Socket(); + try { + Consumer onClose = (channel) -> { + final NodeChannels connected = connectedNodes.get(node); + if (connected != null && connected.hasChannel(channel)) { + try { + executor.execute(() -> { + disconnectFromNode(node, channel, "channel closed event"); + }); + } catch (RejectedExecutionException ex) { + logger.debug("failed to run disconnectFromNode - node is shutting down"); + } + } + }; + InetSocketAddress address = ((InetSocketTransportAddress) node.getAddress()).address(); + // we just use a single connections + configureSocket(socket); 
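+ // connect synchronously, then share this single blocking channel across all five channel types and start its read loop via loopRead(executor)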
+ socket.connect(address, (int) TCP_CONNECT_TIMEOUT.get(settings).millis()); + MockChannel channel = new MockChannel(socket, address, "none", onClose); + channel.loopRead(executor); + for (MockChannel[] channels : nodeChannels.getChannelArrays()) { + for (int i = 0; i < channels.length; i++) { + channels[i] = channel; + } + } + success = true; + } finally { + if (success == false) { + IOUtils.close(nodeChannels, socket); + } + } + + return nodeChannels; + } + + + + private void configureSocket(Socket socket) throws SocketException { + socket.setTcpNoDelay(TCP_NO_DELAY.get(settings)); + ByteSizeValue tcpSendBufferSize = TCP_SEND_BUFFER_SIZE.get(settings); + if (tcpSendBufferSize.bytes() > 0) { + socket.setSendBufferSize(tcpSendBufferSize.bytesAsInt()); + } + ByteSizeValue tcpReceiveBufferSize = TCP_RECEIVE_BUFFER_SIZE.get(settings); + if (tcpReceiveBufferSize.bytes() > 0) { + socket.setReceiveBufferSize(tcpReceiveBufferSize.bytesAsInt()); + } + socket.setReuseAddress(TCP_REUSE_ADDRESS.get(settings())); + } + + @Override + protected boolean isOpen(MockChannel mockChannel) { + return mockChannel.isOpen.get(); + } + + @Override + protected void sendMessage(MockChannel mockChannel, BytesReference reference, Runnable sendListener, boolean close) throws IOException { + synchronized (mockChannel) { + final Socket socket = mockChannel.activeChannel; + OutputStream outputStream = new BufferedOutputStream(socket.getOutputStream()); + reference.writeTo(outputStream); + outputStream.flush(); + } + if (sendListener != null) { + sendListener.run(); + } + if (close) { + IOUtils.closeWhileHandlingException(mockChannel); + } + } + + @Override + protected void closeChannels(List channel) throws IOException { + IOUtils.close(channel); + } + + @Override + public long serverOpen() { + return 1; + } + + public final class MockChannel implements Closeable { + private final AtomicBoolean isOpen = new AtomicBoolean(true); + private final InetSocketAddress localAddress; + private final ServerSocket serverSocket; + private final ConcurrentHashMap workerChannels = new ConcurrentHashMap<>(); + private final Socket activeChannel; + private final String profile; + private final CancellableThreads cancellableThreads = new CancellableThreads(); + private final Closeable onClose; + + public MockChannel(Socket socket, InetSocketAddress localAddress, String profile, Consumer onClose) { + this.localAddress = localAddress; + this.activeChannel = socket; + this.serverSocket = null; + this.profile = profile; + this.onClose = () -> onClose.accept(this); + } + public void accept(Executor executor) throws IOException { + while (isOpen.get()) { + Socket accept = serverSocket.accept(); + configureSocket(accept); + MockChannel mockChannel = new MockChannel(accept, localAddress, profile, workerChannels::remove); + workerChannels.put(mockChannel, Boolean.TRUE); + mockChannel.loopRead(executor); + } + } + + public void loopRead(Executor executor) { + executor.execute(new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + if (isOpen.get()) { + try { + onException(MockChannel.this, e); + } catch (IOException ex) { + logger.warn("failed on handling exception", ex); + } + } + } + + @Override + protected void doRun() throws Exception { + StreamInput input = new InputStreamStreamInput(new BufferedInputStream(activeChannel.getInputStream())); + while (isOpen.get()) { + cancellableThreads.executeIO(() -> readMessage(MockChannel.this, input)); + } + } + }); + } + + public MockChannel(ServerSocket serverSocket, String 
profile) { + this.localAddress = (InetSocketAddress) serverSocket.getLocalSocketAddress(); + this.serverSocket = serverSocket; + this.profile = profile; + this.activeChannel = null; + this.onClose = null; + } + + @Override + public void close() throws IOException { + if (isOpen.compareAndSet(true, false)) { + IOUtils.close( () -> cancellableThreads.cancel("channel closed"), serverSocket, activeChannel, + () -> IOUtils.close(workerChannels.keySet()), onClose); + } + } + } + + + @Override + protected void doStart() { + boolean success = false; + try { + if (NetworkService.NETWORK_SERVER.get(settings)) { + // loop through all profiles and start them up, special handling for default one + for (Map.Entry entry : buildProfileSettings().entrySet()) { + final Settings settings = Settings.builder() + .put(entry.getValue()).build(); + bindServer(entry.getKey(), settings); + } + } + super.doStart(); + success = true; + } finally { + if (success == false) { + doStop(); + } + } + } + + @Override + protected void stopInternal() { + ThreadPool.terminate(executor, 10, TimeUnit.SECONDS); + } + + @Override + protected Version getCurrentVersion() { + return mockVersion; + } +} + diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/plugin/repository/gcs/GoogleCloudStorageModule.java b/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransportPlugin.java similarity index 64% rename from plugins/repository-gcs/src/main/java/org/elasticsearch/plugin/repository/gcs/GoogleCloudStorageModule.java rename to test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransportPlugin.java index 8a4bf88ed74..3e17cdcb30a 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/plugin/repository/gcs/GoogleCloudStorageModule.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransportPlugin.java @@ -16,16 +16,16 @@ * specific language governing permissions and limitations * under the License. */ +package org.elasticsearch.transport; -package org.elasticsearch.plugin.repository.gcs; +import org.elasticsearch.common.network.NetworkModule; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.repositories.gcs.GoogleCloudStorageService; +public class MockTcpTransportPlugin extends Plugin { + public static final String MOCK_TCP_TRANSPORT_NAME = "mock-socket-network"; -public class GoogleCloudStorageModule extends AbstractModule { - - @Override - protected void configure() { - bind(GoogleCloudStorageService.class).to(GoogleCloudStorageService.InternalGoogleCloudStorageService.class).asEagerSingleton(); + public void onModule(NetworkModule module) { + module.registerTransport(MOCK_TCP_TRANSPORT_NAME, MockTcpTransport.class); } } diff --git a/core/src/main/java/org/elasticsearch/repositories/RepositoryNameModule.java b/test/framework/src/main/java/org/elasticsearch/transport/MockTransportClient.java similarity index 51% rename from core/src/main/java/org/elasticsearch/repositories/RepositoryNameModule.java rename to test/framework/src/main/java/org/elasticsearch/transport/MockTransportClient.java index 47be67df34c..a198ef77956 100644 --- a/core/src/main/java/org/elasticsearch/repositories/RepositoryNameModule.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/MockTransportClient.java @@ -16,24 +16,25 @@ * specific language governing permissions and limitations * under the License. 
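How a test might opt into the new socket-based transport; a hedged sketch, not in the patch. The plugin class and registered name come from MockTcpTransportPlugin above, while the ESIntegTestCase overrides and their placement are assumptions.

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        return Collections.singletonList(MockTcpTransportPlugin.class); // hypothetical test override
    }

    @Override
    protected Settings nodeSettings(int nodeOrdinal) {
        return Settings.builder()
            .put(super.nodeSettings(nodeOrdinal))
            // selects the transport registered via module.registerTransport(...) above
            .put(NetworkModule.TRANSPORT_TYPE_KEY, MockTcpTransportPlugin.MOCK_TCP_TRANSPORT_NAME)
            .build();
    }
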
*/ +package org.elasticsearch.transport; -package org.elasticsearch.repositories; +import org.elasticsearch.client.transport.TransportClient; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.common.inject.AbstractModule; +import java.util.Arrays; +import java.util.Collection; -/** - * Binds specific instance of RepositoryName for injection to repository module - */ -public class RepositoryNameModule extends AbstractModule { +@SuppressWarnings({"unchecked","varargs"}) +public class MockTransportClient extends TransportClient { + private static final Settings DEFAULT_SETTINGS = Settings.builder().put("transport.type.default", "local").build(); - private final RepositoryName repositoryName; - public RepositoryNameModule(RepositoryName repositoryName) { - this.repositoryName = repositoryName; + public MockTransportClient(Settings settings, Class... plugins) { + super(settings, DEFAULT_SETTINGS, Arrays.asList(plugins)); } - @Override - protected void configure() { - bind(RepositoryName.class).toInstance(repositoryName); + public MockTransportClient(Settings settings, Collection> plugins) { + super(settings, DEFAULT_SETTINGS, plugins); } } diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/test/FileUtilsTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/FileUtilsTests.java index 128cb862e57..db41c42e16a 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/test/FileUtilsTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/test/FileUtilsTests.java @@ -84,7 +84,8 @@ public class FileUtilsTests extends ESTestCase { assertSingleFile(yamlSuites.get(dir.getFileName().toString()), dir.getFileName().toString(), file.getFileName().toString()); //load from external file (optional extension) - yamlSuites = FileUtils.findYamlSuites(dir.getFileSystem(), "/rest-api-spec/test", dir.resolve("test_loading").toAbsolutePath().toString()); + yamlSuites = FileUtils.findYamlSuites(dir.getFileSystem(), "/rest-api-spec/test", + dir.resolve("test_loading").toAbsolutePath().toString()); assertThat(yamlSuites, notNullValue()); assertThat(yamlSuites.size(), equalTo(1)); assertThat(yamlSuites.containsKey(dir.getFileName().toString()), equalTo(true)); diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/test/JsonPathTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/JsonPathTests.java deleted file mode 100644 index fefcd57af79..00000000000 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/test/JsonPathTests.java +++ /dev/null @@ -1,159 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
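A usage note on MockTransportClient above, as a minimal sketch (not from the patch): because DEFAULT_SETTINGS pins transport.type.default to "local", test code can construct a client without any transport configuration of its own.

    TransportClient client = new MockTransportClient(Settings.EMPTY); // zero-plugin varargs call
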
- */ -package org.elasticsearch.test.rest.test; - -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.rest.Stash; -import org.elasticsearch.test.rest.json.JsonPath; - -import java.util.List; -import java.util.Map; -import java.util.Set; - -import static org.hamcrest.Matchers.contains; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; - -public class JsonPathTests extends ESTestCase { - public void testEvaluateObjectPathEscape() throws Exception { - String json = "{ \"field1\": { \"field2.field3\" : \"value2\" } }"; - JsonPath jsonPath = new JsonPath(json); - Object object = jsonPath.evaluate("field1.field2\\.field3"); - assertThat(object, instanceOf(String.class)); - assertThat((String)object, equalTo("value2")); - } - - public void testEvaluateObjectPathWithDoubleDot() throws Exception { - String json = "{ \"field1\": { \"field2\" : \"value2\" } }"; - JsonPath jsonPath = new JsonPath(json); - Object object = jsonPath.evaluate("field1..field2"); - assertThat(object, instanceOf(String.class)); - assertThat((String)object, equalTo("value2")); - } - - public void testEvaluateObjectPathEndsWithDot() throws Exception { - String json = "{ \"field1\": { \"field2\" : \"value2\" } }"; - JsonPath jsonPath = new JsonPath(json); - Object object = jsonPath.evaluate("field1.field2."); - assertThat(object, instanceOf(String.class)); - assertThat((String)object, equalTo("value2")); - } - - public void testEvaluateString() throws Exception { - String json = "{ \"field1\": { \"field2\" : \"value2\" } }"; - JsonPath jsonPath = new JsonPath(json); - Object object = jsonPath.evaluate("field1.field2"); - assertThat(object, instanceOf(String.class)); - assertThat((String)object, equalTo("value2")); - } - - public void testEvaluateInteger() throws Exception { - String json = "{ \"field1\": { \"field2\" : 333 } }"; - JsonPath jsonPath = new JsonPath(json); - Object object = jsonPath.evaluate("field1.field2"); - assertThat(object, instanceOf(Integer.class)); - assertThat((Integer)object, equalTo(333)); - } - - public void testEvaluateDouble() throws Exception { - String json = "{ \"field1\": { \"field2\" : 3.55 } }"; - JsonPath jsonPath = new JsonPath(json); - Object object = jsonPath.evaluate("field1.field2"); - assertThat(object, instanceOf(Double.class)); - assertThat((Double)object, equalTo(3.55)); - } - - public void testEvaluateArray() throws Exception { - String json = "{ \"field1\": { \"array1\" : [ \"value1\", \"value2\" ] } }"; - JsonPath jsonPath = new JsonPath(json); - Object object = jsonPath.evaluate("field1.array1"); - assertThat(object, instanceOf(List.class)); - List list = (List) object; - assertThat(list.size(), equalTo(2)); - assertThat(list.get(0), instanceOf(String.class)); - assertThat((String)list.get(0), equalTo("value1")); - assertThat(list.get(1), instanceOf(String.class)); - assertThat((String)list.get(1), equalTo("value2")); - } - - public void testEvaluateArrayElement() throws Exception { - String json = "{ \"field1\": { \"array1\" : [ \"value1\", \"value2\" ] } }"; - JsonPath jsonPath = new JsonPath(json); - Object object = jsonPath.evaluate("field1.array1.1"); - assertThat(object, instanceOf(String.class)); - assertThat((String)object, equalTo("value2")); - } - - public void testEvaluateArrayElementObject() throws Exception { - String json = "{ \"field1\": { \"array1\" 
: [ {\"element\": \"value1\"}, {\"element\":\"value2\"} ] } }"; - JsonPath jsonPath = new JsonPath(json); - Object object = jsonPath.evaluate("field1.array1.1.element"); - assertThat(object, instanceOf(String.class)); - assertThat((String)object, equalTo("value2")); - } - - public void testEvaluateArrayElementObjectWrongPath() throws Exception { - String json = "{ \"field1\": { \"array1\" : [ {\"element\": \"value1\"}, {\"element\":\"value2\"} ] } }"; - JsonPath jsonPath = new JsonPath(json); - Object object = jsonPath.evaluate("field1.array2.1.element"); - assertThat(object, nullValue()); - } - - @SuppressWarnings("unchecked") - public void testEvaluateObjectKeys() throws Exception { - String json = "{ \"metadata\": { \"templates\" : {\"template_1\": { \"field\" : \"value\"}, \"template_2\": { \"field\" : \"value\"} } } }"; - JsonPath jsonPath = new JsonPath(json); - Object object = jsonPath.evaluate("metadata.templates"); - assertThat(object, instanceOf(Map.class)); - Map map = (Map)object; - assertThat(map.size(), equalTo(2)); - Set strings = map.keySet(); - assertThat(strings, contains("template_1", "template_2")); - } - - @SuppressWarnings("unchecked") - public void testEvaluateEmptyPath() throws Exception { - String json = "{ \"field1\": { \"array1\" : [ {\"element\": \"value1\"}, {\"element\":\"value2\"} ] } }"; - JsonPath jsonPath = new JsonPath(json); - Object object = jsonPath.evaluate(""); - assertThat(object, notNullValue()); - assertThat(object, instanceOf(Map.class)); - assertThat(((Map)object).containsKey("field1"), equalTo(true)); - } - - public void testEvaluateStashInPropertyName() throws Exception { - String json = "{ \"field1\": { \"elements\" : {\"element1\": \"value1\"}}}"; - JsonPath jsonPath = new JsonPath(json); - try { - jsonPath.evaluate("field1.$placeholder.element1"); - fail("evaluate should have failed due to unresolved placeholder"); - } catch(IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("stashed value not found for key [$placeholder]")); - } - - Stash stash = new Stash(); - stash.stashValue("placeholder", "elements"); - Object object = jsonPath.evaluate("field1.$placeholder.element1", stash); - assertThat(object, notNullValue()); - assertThat(object.toString(), equalTo("value1")); - } -} diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/test/ObjectPathTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/ObjectPathTests.java new file mode 100644 index 00000000000..1d99a73c767 --- /dev/null +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/test/ObjectPathTests.java @@ -0,0 +1,265 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.test.rest.test; + +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.rest.ObjectPath; +import org.elasticsearch.test.rest.Stash; + +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class ObjectPathTests extends ESTestCase { + + private static XContentBuilder randomXContentBuilder() throws IOException { + //only string based formats are supported, no cbor nor smile + XContentType xContentType = randomFrom(XContentType.JSON, XContentType.YAML); + return XContentBuilder.builder(XContentFactory.xContent(xContentType)); + } + + public void testEvaluateObjectPathEscape() throws Exception { + XContentBuilder xContentBuilder = randomXContentBuilder(); + xContentBuilder.startObject(); + xContentBuilder.startObject("field1"); + xContentBuilder.field("field2.field3", "value2"); + xContentBuilder.endObject(); + xContentBuilder.endObject(); + ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), xContentBuilder.string()); + Object object = objectPath.evaluate("field1.field2\\.field3"); + assertThat(object, instanceOf(String.class)); + assertThat(object, equalTo("value2")); + } + + public void testEvaluateObjectPathWithDots() throws Exception { + XContentBuilder xContentBuilder = randomXContentBuilder(); + xContentBuilder.startObject(); + xContentBuilder.startObject("field1"); + xContentBuilder.field("field2", "value2"); + xContentBuilder.endObject(); + xContentBuilder.endObject(); + ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), xContentBuilder.string()); + Object object = objectPath.evaluate("field1..field2"); + assertThat(object, instanceOf(String.class)); + assertThat(object, equalTo("value2")); + object = objectPath.evaluate("field1.field2."); + assertThat(object, instanceOf(String.class)); + assertThat(object, equalTo("value2")); + object = objectPath.evaluate("field1.field2"); + assertThat(object, instanceOf(String.class)); + assertThat(object, equalTo("value2")); + } + + public void testEvaluateInteger() throws Exception { + XContentBuilder xContentBuilder = randomXContentBuilder(); + xContentBuilder.startObject(); + xContentBuilder.startObject("field1"); + xContentBuilder.field("field2", 333); + xContentBuilder.endObject(); + xContentBuilder.endObject(); + ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), xContentBuilder.string()); + Object object = objectPath.evaluate("field1.field2"); + assertThat(object, instanceOf(Integer.class)); + assertThat(object, equalTo(333)); + } + + public void testEvaluateDouble() throws Exception { + XContentBuilder xContentBuilder = randomXContentBuilder(); + xContentBuilder.startObject(); + xContentBuilder.startObject("field1"); + xContentBuilder.field("field2", 3.55); + xContentBuilder.endObject(); + xContentBuilder.endObject(); + ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), 
xContentBuilder.string()); + Object object = objectPath.evaluate("field1.field2"); + assertThat(object, instanceOf(Double.class)); + assertThat(object, equalTo(3.55)); + } + + public void testEvaluateArray() throws Exception { + XContentBuilder xContentBuilder = randomXContentBuilder(); + xContentBuilder.startObject(); + xContentBuilder.startObject("field1"); + xContentBuilder.array("array1", "value1", "value2"); + xContentBuilder.endObject(); + xContentBuilder.endObject(); + ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), xContentBuilder.string()); + Object object = objectPath.evaluate("field1.array1"); + assertThat(object, instanceOf(List.class)); + List list = (List) object; + assertThat(list.size(), equalTo(2)); + assertThat(list.get(0), instanceOf(String.class)); + assertThat(list.get(0), equalTo("value1")); + assertThat(list.get(1), instanceOf(String.class)); + assertThat(list.get(1), equalTo("value2")); + object = objectPath.evaluate("field1.array1.1"); + assertThat(object, instanceOf(String.class)); + assertThat(object, equalTo("value2")); + } + + @SuppressWarnings("unchecked") + public void testEvaluateArrayElementObject() throws Exception { + XContentBuilder xContentBuilder = randomXContentBuilder(); + xContentBuilder.startObject(); + xContentBuilder.startObject("field1"); + xContentBuilder.startArray("array1"); + xContentBuilder.startObject(); + xContentBuilder.field("element", "value1"); + xContentBuilder.endObject(); + xContentBuilder.startObject(); + xContentBuilder.field("element", "value2"); + xContentBuilder.endObject(); + xContentBuilder.endArray(); + xContentBuilder.endObject(); + xContentBuilder.endObject(); + ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), xContentBuilder.string()); + Object object = objectPath.evaluate("field1.array1.1.element"); + assertThat(object, instanceOf(String.class)); + assertThat(object, equalTo("value2")); + object = objectPath.evaluate(""); + assertThat(object, notNullValue()); + assertThat(object, instanceOf(Map.class)); + assertThat(((Map)object).containsKey("field1"), equalTo(true)); + object = objectPath.evaluate("field1.array2.1.element"); + assertThat(object, nullValue()); + } + + @SuppressWarnings("unchecked") + public void testEvaluateObjectKeys() throws Exception { + XContentBuilder xContentBuilder = randomXContentBuilder(); + xContentBuilder.startObject(); + xContentBuilder.startObject("metadata"); + xContentBuilder.startObject("templates"); + xContentBuilder.startObject("template_1"); + xContentBuilder.field("field", "value"); + xContentBuilder.endObject(); + xContentBuilder.startObject("template_2"); + xContentBuilder.field("field", "value"); + xContentBuilder.endObject(); + xContentBuilder.endObject(); + xContentBuilder.endObject(); + xContentBuilder.endObject(); + ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), xContentBuilder.string()); + Object object = objectPath.evaluate("metadata.templates"); + assertThat(object, instanceOf(Map.class)); + Map map = (Map)object; + assertThat(map.size(), equalTo(2)); + Set strings = map.keySet(); + assertThat(strings, contains("template_1", "template_2")); + } + + public void testEvaluateStashInPropertyName() throws Exception { + XContentBuilder xContentBuilder = randomXContentBuilder(); + xContentBuilder.startObject(); + xContentBuilder.startObject("field1"); + xContentBuilder.startObject("elements"); + xContentBuilder.field("element1", "value1"); 
+ xContentBuilder.endObject(); + xContentBuilder.endObject(); + xContentBuilder.endObject(); + ObjectPath objectPath = ObjectPath.createFromXContent(xContentBuilder.contentType().xContent(), xContentBuilder.string()); + try { + objectPath.evaluate("field1.$placeholder.element1"); + fail("evaluate should have failed due to unresolved placeholder"); + } catch(IllegalArgumentException e) { + assertThat(e.getMessage(), containsString("stashed value not found for key [placeholder]")); + } + + // Stashed value is whole property name + Stash stash = new Stash(); + stash.stashValue("placeholder", "elements"); + Object object = objectPath.evaluate("field1.$placeholder.element1", stash); + assertThat(object, notNullValue()); + assertThat(object.toString(), equalTo("value1")); + + // Stash key has dots + Map stashedObject = new HashMap<>(); + stashedObject.put("subobject", "elements"); + stash.stashValue("object", stashedObject); + object = objectPath.evaluate("field1.$object\\.subobject.element1", stash); + assertThat(object, notNullValue()); + assertThat(object.toString(), equalTo("value1")); + + // Stashed value is part of property name + stash.stashValue("placeholder", "ele"); + object = objectPath.evaluate("field1.${placeholder}ments.element1", stash); + assertThat(object, notNullValue()); + assertThat(object.toString(), equalTo("value1")); + + // Stashed value is inside of property name + stash.stashValue("placeholder", "le"); + object = objectPath.evaluate("field1.e${placeholder}ments.element1", stash); + assertThat(object, notNullValue()); + assertThat(object.toString(), equalTo("value1")); + + // Multiple stashed values in property name + stash.stashValue("placeholder", "le"); + stash.stashValue("placeholder2", "nts"); + object = objectPath.evaluate("field1.e${placeholder}me${placeholder2}.element1", stash); + assertThat(object, notNullValue()); + assertThat(object.toString(), equalTo("value1")); + + // Stashed value is part of property name and has dots + stashedObject.put("subobject", "ele"); + stash.stashValue("object", stashedObject); + object = objectPath.evaluate("field1.${object\\.subobject}ments.element1", stash); + assertThat(object, notNullValue()); + assertThat(object.toString(), equalTo("value1")); + } + + @SuppressWarnings("unchecked") + public void testEvaluateArrayAsRoot() throws Exception { + XContentBuilder xContentBuilder = randomXContentBuilder(); + xContentBuilder.startArray(); + xContentBuilder.startObject(); + xContentBuilder.field("alias", "test_alias1"); + xContentBuilder.field("index", "test1"); + xContentBuilder.endObject(); + xContentBuilder.startObject(); + xContentBuilder.field("alias", "test_alias2"); + xContentBuilder.field("index", "test2"); + xContentBuilder.endObject(); + xContentBuilder.endArray(); + ObjectPath objectPath = ObjectPath.createFromXContent(XContentFactory.xContent(XContentType.YAML), xContentBuilder.string()); + Object object = objectPath.evaluate(""); + assertThat(object, notNullValue()); + assertThat(object, instanceOf(List.class)); + assertThat(((List)object).size(), equalTo(2)); + object = objectPath.evaluate("0"); + assertThat(object, notNullValue()); + assertThat(object, instanceOf(Map.class)); + assertThat(((Map)object).get("alias"), equalTo("test_alias1")); + object = objectPath.evaluate("1.index"); + assertThat(object, notNullValue()); + assertThat(object, instanceOf(String.class)); + assertThat(object, equalTo("test2")); + } +} diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestTestParserTests.java 
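The ObjectPathTests above pin down the evaluate() contract: a backslash-escaped dot ("field2\.field3") addresses a single key that contains a dot, stray or trailing dots are ignored, numeric segments index into lists, and the empty path returns the root object. A minimal, self-contained sketch of that walk, assuming the semantics the assertions exercise (an illustration only, not the framework's implementation; class and method names are hypothetical):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;

    final class PathWalkSketch {
        // Split the path on unescaped dots, then descend: map keys for Map
        // nodes, numeric indices for List nodes. Unknown keys yield null,
        // matching the nullValue() assertions above.
        static Object walk(Object root, String path) {
            List<String> segments = new ArrayList<>();
            StringBuilder current = new StringBuilder();
            for (int i = 0; i < path.length(); i++) {
                char c = path.charAt(i);
                if (c == '\\' && i + 1 < path.length() && path.charAt(i + 1) == '.') {
                    current.append('.'); // "field2\.field3" is one key containing a dot
                    i++;
                } else if (c == '.') {
                    if (current.length() > 0) { // empty segments ("a..b", "a.b.") are skipped
                        segments.add(current.toString());
                        current.setLength(0);
                    }
                } else {
                    current.append(c);
                }
            }
            if (current.length() > 0) {
                segments.add(current.toString());
            }
            Object value = root; // empty path: return the root as-is
            for (String segment : segments) {
                if (value instanceof Map) {
                    value = ((Map<?, ?>) value).get(segment);
                } else if (value instanceof List) {
                    value = ((List<?>) value).get(Integer.parseInt(segment)); // assumes numeric segment
                } else {
                    return null;
                }
                if (value == null) {
                    return null;
                }
            }
            return value;
        }
    }

The same walk applies when a List sits at the top, which is why the array-as-root test can use plain numeric paths such as "0" and "1.index".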
b/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestTestParserTests.java index 298f230d64a..6b5cc3defb7 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestTestParserTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/test/RestTestParserTests.java @@ -54,13 +54,30 @@ public class RestTestParserTests extends ESTestCase { parser.close(); } - public void testParseTestSetupAndSections() throws Exception { - parser = YamlXContent.yamlXContent.createParser( + public void testParseTestSetupTeardownAndSections() throws Exception { + final boolean includeSetup = randomBoolean(); + final boolean includeTeardown = randomBoolean(); + StringBuilder testSpecBuilder = new StringBuilder(); + if (includeSetup) { + testSpecBuilder + .append("---\n" + "setup:\n" + " - do:\n" + " indices.create:\n" + " index: test_index\n" + - "\n" + + "\n"); + } + if (includeTeardown) { + testSpecBuilder + .append("---\n" + + "teardown:\n" + + " - do:\n" + + " indices.delete:\n" + + " index: test_index\n" + + "\n"); + } + parser = YamlXContent.yamlXContent.createParser( + testSpecBuilder.toString() + "---\n" + "\"Get index mapping\":\n" + " - do:\n" + @@ -92,12 +109,30 @@ public class RestTestParserTests extends ESTestCase { assertThat(restTestSuite, notNullValue()); assertThat(restTestSuite.getName(), equalTo("suite")); assertThat(restTestSuite.getSetupSection(), notNullValue()); - assertThat(restTestSuite.getSetupSection().getSkipSection().isEmpty(), equalTo(true)); - - assertThat(restTestSuite.getSetupSection().getDoSections().size(), equalTo(1)); - assertThat(restTestSuite.getSetupSection().getDoSections().get(0).getApiCallSection().getApi(), equalTo("indices.create")); - assertThat(restTestSuite.getSetupSection().getDoSections().get(0).getApiCallSection().getParams().size(), equalTo(1)); - assertThat(restTestSuite.getSetupSection().getDoSections().get(0).getApiCallSection().getParams().get("index"), equalTo("test_index")); + if (includeSetup) { + assertThat(restTestSuite.getSetupSection().isEmpty(), equalTo(false)); + assertThat(restTestSuite.getSetupSection().getSkipSection().isEmpty(), equalTo(true)); + assertThat(restTestSuite.getSetupSection().getDoSections().size(), equalTo(1)); + assertThat(restTestSuite.getSetupSection().getDoSections().get(0).getApiCallSection().getApi(), equalTo("indices.create")); + assertThat(restTestSuite.getSetupSection().getDoSections().get(0).getApiCallSection().getParams().size(), equalTo(1)); + assertThat(restTestSuite.getSetupSection().getDoSections().get(0).getApiCallSection().getParams().get("index"), + equalTo("test_index")); + } else { + assertThat(restTestSuite.getSetupSection().isEmpty(), equalTo(true)); + } + + assertThat(restTestSuite.getTeardownSection(), notNullValue()); + if (includeTeardown) { + assertThat(restTestSuite.getTeardownSection().isEmpty(), equalTo(false)); + assertThat(restTestSuite.getTeardownSection().getSkipSection().isEmpty(), equalTo(true)); + assertThat(restTestSuite.getTeardownSection().getDoSections().size(), equalTo(1)); + assertThat(restTestSuite.getTeardownSection().getDoSections().get(0).getApiCallSection().getApi(), equalTo("indices.delete")); + assertThat(restTestSuite.getTeardownSection().getDoSections().get(0).getApiCallSection().getParams().size(), equalTo(1)); + assertThat(restTestSuite.getTeardownSection().getDoSections().get(0).getApiCallSection().getParams().get("index"), + equalTo("test_index")); + } else { + assertThat(restTestSuite.getTeardownSection().isEmpty(), 
equalTo(true)); + } assertThat(restTestSuite.getTestSections().size(), equalTo(2)); @@ -120,7 +155,8 @@ public class RestTestParserTests extends ESTestCase { assertThat(restTestSuite.getTestSections().get(1).getName(), equalTo("Get type mapping - pre 1.0")); assertThat(restTestSuite.getTestSections().get(1).getSkipSection().isEmpty(), equalTo(false)); - assertThat(restTestSuite.getTestSections().get(1).getSkipSection().getReason(), equalTo("for newer versions the index name is always returned")); + assertThat(restTestSuite.getTestSections().get(1).getSkipSection().getReason(), + equalTo("for newer versions the index name is always returned")); assertThat(restTestSuite.getTestSections().get(1).getSkipSection().getLowerVersion(), equalTo(Version.V_2_0_0)); assertThat(restTestSuite.getTestSections().get(1).getSkipSection().getUpperVersion(), equalTo(Version.CURRENT)); assertThat(restTestSuite.getTestSections().get(1).getExecutableSections().size(), equalTo(3)); diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/test/StashTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/StashTests.java new file mode 100644 index 00000000000..7d0c0598f09 --- /dev/null +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/test/StashTests.java @@ -0,0 +1,46 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.test.rest.test; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.rest.Stash; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import static java.util.Collections.singletonMap; + +public class StashTests extends ESTestCase { + public void testReplaceStashedValuesEmbeddedStashKey() throws IOException { + Stash stash = new Stash(); + stash.stashValue("stashed", "bar"); + + Map expected = new HashMap<>(); + expected.put("key", singletonMap("a", "foobar")); + Map map = new HashMap<>(); + Map map2 = new HashMap<>(); + map2.put("a", "foo${stashed}"); + map.put("key", map2); + + Map actual = stash.replaceStashedValues(map); + assertEquals(expected, actual); + } +} diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/test/TeardownSectionParserTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/test/TeardownSectionParserTests.java new file mode 100644 index 00000000000..eeccea5f5e5 --- /dev/null +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/test/TeardownSectionParserTests.java @@ -0,0 +1,93 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
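The new StashTests pins down embedded replacement: a stashed value can be spliced into the middle of a string ("foo${stashed}" with stashed=bar becomes "foobar"), not just substituted for a whole value, and replaceStashedValues applies this recursively through nested maps. A rough sketch of the string-level expansion, assuming a regex-based scan (illustrative only; the real Stash also resolves whole-segment "$key" references and escaped dots inside keys, as the ObjectPath tests above show):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    final class StashSketch {
        private static final Pattern EMBEDDED = Pattern.compile("\\$\\{([^}]+)\\}");
        private final Map<String, Object> stash = new HashMap<>();

        void stashValue(String key, Object value) {
            stash.put(key, value);
        }

        // Expand every ${key} occurrence; a bare "$key" value is treated as
        // if the whole string were "${key}". Unknown keys fail loudly, like
        // the "stashed value not found" assertion above.
        String replace(String value) {
            if (value.startsWith("$") && !value.startsWith("${")) {
                value = "${" + value.substring(1) + "}";
            }
            Matcher m = EMBEDDED.matcher(value);
            StringBuffer sb = new StringBuffer();
            while (m.find()) {
                Object stashed = stash.get(m.group(1));
                if (stashed == null) {
                    throw new IllegalArgumentException("stashed value not found for key [" + m.group(1) + "]");
                }
                m.appendReplacement(sb, Matcher.quoteReplacement(stashed.toString()));
            }
            m.appendTail(sb);
            return sb.toString();
        }
    }
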
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.test.rest.test; + +import org.elasticsearch.Version; +import org.elasticsearch.common.xcontent.yaml.YamlXContent; +import org.elasticsearch.test.rest.parser.RestTestSuiteParseContext; +import org.elasticsearch.test.rest.parser.TeardownSectionParser; +import org.elasticsearch.test.rest.section.TeardownSection; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; + +/** + * Unit tests for the teardown section parser + */ +public class TeardownSectionParserTests extends AbstractParserTestCase { + + public void testParseTeardownSection() throws Exception { + parser = YamlXContent.yamlXContent.createParser( + " - do:\n" + + " delete:\n" + + " index: foo\n" + + " type: doc\n" + + " id: 1\n" + + " ignore: 404\n" + + " - do:\n" + + " delete2:\n" + + " index: foo\n" + + " type: doc\n" + + " id: 1\n" + + " ignore: 404" + ); + + TeardownSectionParser teardownSectionParser = new TeardownSectionParser(); + TeardownSection section = teardownSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + + assertThat(section, notNullValue()); + assertThat(section.getSkipSection().isEmpty(), equalTo(true)); + assertThat(section.getDoSections().size(), equalTo(2)); + assertThat(section.getDoSections().get(0).getApiCallSection().getApi(), equalTo("delete")); + assertThat(section.getDoSections().get(1).getApiCallSection().getApi(), equalTo("delete2")); + } + + public void testParseWithSkip() throws Exception { + parser = YamlXContent.yamlXContent.createParser( + " - skip:\n" + + " version: \"2.0.0 - 2.3.0\"\n" + + " reason: \"there is a reason\"\n" + + " - do:\n" + + " delete:\n" + + " index: foo\n" + + " type: doc\n" + + " id: 1\n" + + " ignore: 404\n" + + " - do:\n" + + " delete2:\n" + + " index: foo\n" + + " type: doc\n" + + " id: 1\n" + + " ignore: 404" + ); + + TeardownSectionParser teardownSectionParser = new TeardownSectionParser(); + TeardownSection section = teardownSectionParser.parse(new RestTestSuiteParseContext("api", "suite", parser)); + + assertThat(section, notNullValue()); + assertThat(section.getSkipSection().isEmpty(), equalTo(false)); + assertThat(section.getSkipSection().getLowerVersion(), equalTo(Version.V_2_0_0)); + assertThat(section.getSkipSection().getUpperVersion(), equalTo(Version.V_2_3_0)); + assertThat(section.getSkipSection().getReason(), equalTo("there is a reason")); + assertThat(section.getDoSections().size(), equalTo(2)); + assertThat(section.getDoSections().get(0).getApiCallSection().getApi(), equalTo("delete")); + assertThat(section.getDoSections().get(1).getApiCallSection().getApi(), equalTo("delete2")); + } +} diff --git a/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java index 63c5eb01fb0..d4af031aa84 100644 --- 
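The skip section the teardown parser accepts ("2.0.0 - 2.3.0") is an inclusive version range, as the lower/upper bound assertions above fix. The runner's decision then amounts to something like the following sketch (the method name and hard-coded bounds are illustrative; Version.fromString, onOrAfter and onOrBefore are the existing comparison helpers the bounds map onto):

    import org.elasticsearch.Version;

    // Illustrative only: skip the section when the current version falls
    // inside the inclusive range declared by the skip section.
    static boolean shouldSkip(Version current) {
        Version lower = Version.fromString("2.0.0");
        Version upper = Version.fromString("2.3.0");
        return current.onOrAfter(lower) && current.onOrBefore(upper);
    }
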
a/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java @@ -1,3 +1,4 @@ + /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with @@ -24,7 +25,9 @@ import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.test.ESTestCase; @@ -75,10 +78,10 @@ public class InternalTestClusterTests extends ESTestCase { String nodePrefix = randomRealisticUnicodeOfCodepointLengthBetween(1, 10); Path baseDir = createTempDir(); - InternalTestCluster cluster0 = new InternalTestCluster("local", clusterSeed, baseDir, masterNodes, + InternalTestCluster cluster0 = new InternalTestCluster(clusterSeed, baseDir, masterNodes, minNumDataNodes, maxNumDataNodes, clusterName, nodeConfigurationSource, numClientNodes, enableHttpPipelining, nodePrefix, Collections.emptyList(), Function.identity()); - InternalTestCluster cluster1 = new InternalTestCluster("local", clusterSeed, baseDir, masterNodes, + InternalTestCluster cluster1 = new InternalTestCluster(clusterSeed, baseDir, masterNodes, minNumDataNodes, maxNumDataNodes, clusterName, nodeConfigurationSource, numClientNodes, enableHttpPipelining, nodePrefix, Collections.emptyList(), Function.identity()); // TODO: this is not ideal - we should have a way to make sure ports are initialized in the same way @@ -90,7 +93,7 @@ public class InternalTestClusterTests extends ESTestCase { * a set of settings that are expected to have different values between clusters, even if they have been initialized with the same * base settings.
*/ - final static Set clusterUniqueSettings = new HashSet<>(); + static final Set clusterUniqueSettings = new HashSet<>(); static { clusterUniqueSettings.add(ClusterName.CLUSTER_NAME_SETTING.getKey()); @@ -125,22 +128,32 @@ public class InternalTestClusterTests extends ESTestCase { boolean masterNodes = randomBoolean(); int minNumDataNodes = randomIntBetween(0, 3); int maxNumDataNodes = randomIntBetween(minNumDataNodes, 4); - final String clusterName1 = "shared1";//clusterName("shared1", clusterSeed); - final String clusterName2 = "shared2";//clusterName("shared", Integer.toString(CHILD_JVM_ID), clusterSeed); - /*while (clusterName.equals(clusterName1)) { - clusterName1 = clusterName("shared", Integer.toString(CHILD_JVM_ID), clusterSeed); // spin until the time changes - }*/ - NodeConfigurationSource nodeConfigurationSource = NodeConfigurationSource.EMPTY; + final String clusterName1 = "shared1"; + final String clusterName2 = "shared2"; + NodeConfigurationSource nodeConfigurationSource = new NodeConfigurationSource() { + @Override + public Settings nodeSettings(int nodeOrdinal) { + return Settings.builder() + .put(NetworkModule.HTTP_ENABLED.getKey(), false) + .put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "local") + .put(NetworkModule.TRANSPORT_TYPE_KEY, "local").build(); + } + + @Override + public Settings transportClientSettings() { + return Settings.builder() + .put(NetworkModule.TRANSPORT_TYPE_KEY, "local").build(); + } + }; int numClientNodes = randomIntBetween(0, 2); boolean enableHttpPipelining = randomBoolean(); - int jvmOrdinal = randomIntBetween(0, 10); String nodePrefix = "foobar"; Path baseDir = createTempDir(); - InternalTestCluster cluster0 = new InternalTestCluster("local", clusterSeed, baseDir, masterNodes, + InternalTestCluster cluster0 = new InternalTestCluster(clusterSeed, baseDir, masterNodes, minNumDataNodes, maxNumDataNodes, clusterName1, nodeConfigurationSource, numClientNodes, enableHttpPipelining, nodePrefix, Collections.emptyList(), Function.identity()); - InternalTestCluster cluster1 = new InternalTestCluster("local", clusterSeed, baseDir, masterNodes, + InternalTestCluster cluster1 = new InternalTestCluster(clusterSeed, baseDir, masterNodes, minNumDataNodes, maxNumDataNodes, clusterName2, nodeConfigurationSource, numClientNodes, enableHttpPipelining, nodePrefix, Collections.emptyList(), Function.identity()); @@ -177,12 +190,24 @@ public class InternalTestClusterTests extends ESTestCase { int minNumDataNodes = 2; int maxNumDataNodes = 2; final String clusterName1 = "shared1"; - NodeConfigurationSource nodeConfigurationSource = NodeConfigurationSource.EMPTY; - int numClientNodes = 0; + NodeConfigurationSource nodeConfigurationSource = new NodeConfigurationSource() { + @Override + public Settings nodeSettings(int nodeOrdinal) { + return Settings.builder().put(NetworkModule.HTTP_ENABLED.getKey(), false) + .put(NetworkModule.TRANSPORT_TYPE_KEY, "local") + .put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "local") + .build(); + } + @Override + public Settings transportClientSettings() { + return Settings.builder() + .put(NetworkModule.TRANSPORT_TYPE_KEY, "local").build(); + } + }; int numClientNodes = randomIntBetween(0, 2); boolean enableHttpPipelining = randomBoolean(); String nodePrefix = "test"; Path baseDir = createTempDir(); - InternalTestCluster cluster = new InternalTestCluster("local", clusterSeed, baseDir, masterNodes, + InternalTestCluster cluster = new InternalTestCluster(clusterSeed, baseDir, masterNodes, minNumDataNodes, maxNumDataNodes, 
clusterName1, nodeConfigurationSource, numClientNodes, enableHttpPipelining, nodePrefix, Collections.emptyList(), Function.identity()); try { @@ -218,8 +243,7 @@ public class InternalTestClusterTests extends ESTestCase { assertFileNotExists(testMarker); // a new unknown node used this path, it should be cleaned assertFileExists(stableTestMarker); // but leaving the structure of existing, reused nodes for (String name: cluster.getNodeNames()) { - assertThat("data paths for " + name + " changed", getNodePaths(cluster, name), - equalTo(shardNodePaths.get(name))); + assertThat("data paths for " + name + " changed", getNodePaths(cluster, name), equalTo(shardNodePaths.get(name))); } cluster.beforeTest(random(), 0.0); @@ -245,16 +269,21 @@ public class InternalTestClusterTests extends ESTestCase { public void testDifferentRolesMaintainPathOnRestart() throws Exception { final Path baseDir = createTempDir(); - InternalTestCluster cluster = new InternalTestCluster("local", randomLong(), baseDir, true, 0, 0, "test", + InternalTestCluster cluster = new InternalTestCluster(randomLong(), baseDir, true, 0, 0, "test", new NodeConfigurationSource() { @Override public Settings nodeSettings(int nodeOrdinal) { - return Settings.builder().put(DiscoverySettings.INITIAL_STATE_TIMEOUT_SETTING.getKey(), 0).build(); + return Settings.builder() + .put(NetworkModule.HTTP_ENABLED.getKey(), false) + .put(NetworkModule.TRANSPORT_TYPE_KEY, "local") + .put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "local") + .put(DiscoverySettings.INITIAL_STATE_TIMEOUT_SETTING.getKey(), 0).build(); } @Override public Settings transportClientSettings() { - return Settings.EMPTY; + return Settings.builder() + .put(NetworkModule.TRANSPORT_TYPE_KEY, "local").build(); } }, 0, randomBoolean(), "", Collections.emptyList(), Function.identity()); cluster.beforeTest(random(), 0.0); diff --git a/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java b/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java new file mode 100644 index 00000000000..f94bd11b00c --- /dev/null +++ b/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java @@ -0,0 +1,39 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.transport; + +import org.elasticsearch.Version; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.network.NetworkService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; +import org.elasticsearch.test.transport.MockTransportService; + +public class MockTcpTransportTests extends AbstractSimpleTransportTestCase { + @Override + protected MockTransportService build(Settings settings, Version version) { + NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(); + Transport transport = new MockTcpTransport(settings, threadPool, BigArrays.NON_RECYCLING_INSTANCE, + new NoneCircuitBreakerService(), namedWriteableRegistry, new NetworkService(settings), version); + MockTransportService mockTransportService = new MockTransportService(Settings.EMPTY, transport, threadPool); + mockTransportService.start(); + return mockTransportService; + } +}
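AbstractSimpleTransportTestCase supplies the actual test methods, so the subclass above only has to provide build() and the whole shared transport suite runs against MockTcpTransport. As a hedged sketch of the kind of wiring the harness performs with two built services (illustrative only, not copied from the base class; the DiscoveryNode construction here is an assumption):

    // inside a test method of a subclass like the one above
    MockTransportService serviceA = build(Settings.EMPTY, Version.CURRENT);
    MockTransportService serviceB = build(Settings.EMPTY, Version.CURRENT);
    // connect A to B via B's published address, after which requests can flow A -> B
    DiscoveryNode nodeB = new DiscoveryNode("node_b", serviceB.boundAddress().publishAddress(), Version.CURRENT);
    serviceA.connectToNode(nodeB);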