diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index f43481f9af4..ca78157bcf2 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -400,7 +400,7 @@ class BuildPlugin implements Plugin { // we use './temp' since this is per JVM and tests are forbidden from writing to CWD systemProperty 'java.io.tmpdir', './temp' systemProperty 'java.awt.headless', 'true' - systemProperty 'tests.maven', 'true' // TODO: rename this once we've switched to gradle! + systemProperty 'tests.gradle', 'true' systemProperty 'tests.artifact', project.name systemProperty 'tests.task', path systemProperty 'tests.security.manager', 'true' diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy index 435008132c7..9b070f71c20 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy @@ -67,7 +67,6 @@ public class PluginBuildPlugin extends BuildPlugin { provided "com.vividsolutions:jts:${project.versions.jts}" provided "log4j:log4j:${project.versions.log4j}" provided "log4j:apache-log4j-extras:${project.versions.log4j}" - provided "org.slf4j:slf4j-api:${project.versions.slf4j}" provided "net.java.dev.jna:jna:${project.versions.jna}" } } @@ -101,11 +100,6 @@ public class PluginBuildPlugin extends BuildPlugin { from pluginMetadata // metadata (eg custom security policy) from project.jar // this plugin's jar from project.configurations.runtime - project.configurations.provided // the dep jars - // hack just for slf4j, in case it is "upgrade" from provided to compile, - // since it is not actually provided in distributions - from 
project.configurations.runtime.fileCollection { Dependency dep -> - return dep.name == 'slf4j-api' && project.configurations.compile.dependencies.contains(dep) - } // extra files for the plugin to go into the zip from('src/main/packaging') // TODO: move all config/bin/_size/etc into packaging from('src/main') { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy index 9d205869881..d96ee511051 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy @@ -245,7 +245,8 @@ class ClusterFormationTasks { return setup } Copy copyConfig = project.tasks.create(name: name, type: Copy, dependsOn: setup) - copyConfig.into(new File(node.homeDir, 'config')) // copy must always have a general dest dir, even though we don't use it + File configDir = new File(node.homeDir, 'config') + copyConfig.into(configDir) // copy must always have a general dest dir, even though we don't use it for (Map.Entry extraConfigFile : node.config.extraConfigFiles.entrySet()) { copyConfig.doFirst { // make sure the copy won't be a no-op or act on a directory @@ -258,9 +259,12 @@ class ClusterFormationTasks { } } File destConfigFile = new File(node.homeDir, 'config/' + extraConfigFile.getKey()) - copyConfig.into(destConfigFile.canonicalFile.parentFile) - .from({ extraConfigFile.getValue() }) // wrap in closure to delay resolution to execution time - .rename { destConfigFile.name } + // wrap source file in closure to delay resolution to execution time + copyConfig.from({ extraConfigFile.getValue() }) { + // this must be in a closure so it is only applied to the single file specified in from above + into(configDir.toPath().relativize(destConfigFile.canonicalFile.parentFile.toPath()).toFile()) + rename { destConfigFile.name } + } } return copyConfig } 
diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml index 6c04effc58c..23e6acd9785 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml @@ -374,9 +374,6 @@ - - - diff --git a/buildSrc/version.properties b/buildSrc/version.properties index 3ca58b7bee1..3c2c86c502a 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -4,7 +4,7 @@ lucene = 5.5.0 # optional dependencies spatial4j = 0.5 jts = 1.13 -jackson = 2.6.2 +jackson = 2.7.1 log4j = 1.2.17 slf4j = 1.6.2 jna = 4.1.0 @@ -13,6 +13,8 @@ jna = 4.1.0 # test dependencies randomizedrunner = 2.3.2 junit = 4.11 +# TODO: Upgrade httpclient to a version > 4.5.1 once released. Then remove o.e.test.rest.client.StrictHostnameVerifier* and use +# DefaultHostnameVerifier instead since we no longer need to workaround https://issues.apache.org/jira/browse/HTTPCLIENT-1698 httpclient = 4.3.6 httpcore = 4.3.3 commonslogging = 1.1.3 diff --git a/core/build.gradle b/core/build.gradle index e1511a9cdd1..ac3f421211d 100644 --- a/core/build.gradle +++ b/core/build.gradle @@ -77,7 +77,6 @@ dependencies { // logging compile "log4j:log4j:${versions.log4j}", optional compile "log4j:apache-log4j-extras:${versions.log4j}", optional - compile "org.slf4j:slf4j-api:${versions.slf4j}", optional compile "net.java.dev.jna:jna:${versions.jna}", optional @@ -224,8 +223,9 @@ thirdPartyAudit.excludes = [ 'org.osgi.util.tracker.ServiceTracker', 'org.osgi.util.tracker.ServiceTrackerCustomizer', - 'org.slf4j.impl.StaticMDCBinder', - 'org.slf4j.impl.StaticMarkerBinder', + // from org.netty.util.internal.logging.InternalLoggerFactory (netty) - it's optional + 'org.slf4j.Logger', + 'org.slf4j.LoggerFactory', ] // dependency license are currently checked in distribution diff --git a/core/src/main/java/org/elasticsearch/ElasticsearchException.java 
b/core/src/main/java/org/elasticsearch/ElasticsearchException.java index dbfebd9aa8b..d069bddfdfe 100644 --- a/core/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/core/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -23,7 +23,7 @@ import org.elasticsearch.cluster.action.shard.ShardStateAction; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.logging.support.LoggerMessageFormat; +import org.elasticsearch.common.logging.LoggerMessageFormat; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.Index; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequest.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequest.java index 978e647b999..1d1249e1551 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksRequest.java @@ -36,14 +36,6 @@ public class CancelTasksRequest extends BaseTasksRequest { private String reason = DEFAULT_REASON; - /** - * Cancel tasks on the specified nodes. If none are passed, all cancellable tasks on - * all nodes will be cancelled. - */ - public CancelTasksRequest(String... 
nodesIds) { - super(nodesIds); - } - @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); @@ -54,7 +46,6 @@ public class CancelTasksRequest extends BaseTasksRequest { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(reason); - } @Override diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java index 73e382c4cd6..b07e540d792 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.TaskOperationFailure; import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskInfo; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.tasks.BaseTasksRequest; import org.elasticsearch.action.support.tasks.TransportTasksAction; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterService; @@ -36,6 +35,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.EmptyTransportResponseHandler; import org.elasticsearch.transport.TransportChannel; @@ -84,9 +84,9 @@ public class TransportCancelTasksAction extends TransportTasksAction operation) { - if (request.taskId() != BaseTasksRequest.ALL_TASKS) { + if (request.taskId().isSet() == false) { // we are only checking one task, we can optimize it - 
CancellableTask task = taskManager.getCancellableTask(request.taskId()); + CancellableTask task = taskManager.getCancellableTask(request.taskId().getId()); if (task != null) { if (request.match(task)) { operation.accept(task); @@ -94,7 +94,7 @@ public class TransportCancelTasksAction extends TransportTasksAction nodes, BanLock banLock) { - sendSetBanRequest(nodes, new BanParentTaskRequest(clusterService.localNode().getId(), task.getId(), reason), banLock); + sendSetBanRequest(nodes, + BanParentTaskRequest.createSetBanParentTaskRequest(new TaskId(clusterService.localNode().getId(), task.getId()), reason), + banLock); } private void removeBanOnNodes(CancellableTask task, Set nodes) { - sendRemoveBanRequest(nodes, new BanParentTaskRequest(clusterService.localNode().getId(), task.getId())); + sendRemoveBanRequest(nodes, + BanParentTaskRequest.createRemoveBanParentTaskRequest(new TaskId(clusterService.localNode().getId(), task.getId()))); } private void sendSetBanRequest(Set nodes, BanParentTaskRequest request, BanLock banLock) { @@ -148,8 +151,8 @@ public class TransportCancelTasksAction extends TransportTasksAction { private boolean detailed = false; - /** - * Get information from nodes based on the nodes ids specified. If none are passed, information - * for all nodes will be returned. - */ - public ListTasksRequest(String... nodesIds) { - super(nodesIds); - } - /** * Should the detailed task information be returned. */ @@ -48,7 +40,7 @@ public class ListTasksRequest extends BaseTasksRequest { } /** - * Should the node settings be returned. + * Should the detailed task information be returned. 
*/ public ListTasksRequest detailed(boolean detailed) { this.detailed = detailed; diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksResponse.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksResponse.java index 1a14a527150..3ad4299e38f 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksResponse.java @@ -138,11 +138,13 @@ public class ListTasksResponse extends BaseTasksResponse implements ToXContent { } builder.endObject(); } - builder.startArray("tasks"); + builder.startObject("tasks"); for(TaskInfo task : entry.getValue()) { + builder.startObject(task.getTaskId().toString(), XContentBuilder.FieldCaseConversion.NONE); task.toXContent(builder, params); + builder.endObject(); } - builder.endArray(); + builder.endObject(); builder.endObject(); } builder.endObject(); diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TaskInfo.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TaskInfo.java index 33ed9914077..d71c576093e 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TaskInfo.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TaskInfo.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; import java.io.IOException; @@ -41,7 +42,7 @@ public class TaskInfo implements Writeable, ToXContent { private final DiscoveryNode node; - private final long id; + private final TaskId taskId; private final String type; @@ -51,28 +52,21 @@ public class TaskInfo implements Writeable, ToXContent { private final 
Task.Status status; - private final String parentNode; + private final TaskId parentTaskId; - private final long parentId; - - public TaskInfo(DiscoveryNode node, long id, String type, String action, String description, Task.Status status) { - this(node, id, type, action, description, status, null, -1L); - } - - public TaskInfo(DiscoveryNode node, long id, String type, String action, String description, Task.Status status, String parentNode, long parentId) { + public TaskInfo(DiscoveryNode node, long id, String type, String action, String description, Task.Status status, TaskId parentTaskId) { this.node = node; - this.id = id; + this.taskId = new TaskId(node.getId(), id); this.type = type; this.action = action; this.description = description; this.status = status; - this.parentNode = parentNode; - this.parentId = parentId; + this.parentTaskId = parentTaskId; } public TaskInfo(StreamInput in) throws IOException { node = DiscoveryNode.readNode(in); - id = in.readLong(); + taskId = new TaskId(node.getId(), in.readLong()); type = in.readString(); action = in.readString(); description = in.readOptionalString(); @@ -81,8 +75,11 @@ public class TaskInfo implements Writeable, ToXContent { } else { status = null; } - parentNode = in.readOptionalString(); - parentId = in.readLong(); + parentTaskId = new TaskId(in); + } + + public TaskId getTaskId() { + return taskId; } public DiscoveryNode getNode() { @@ -90,7 +87,7 @@ public class TaskInfo implements Writeable, ToXContent { } public long getId() { - return id; + return taskId.getId(); } public String getType() { @@ -113,12 +110,8 @@ public class TaskInfo implements Writeable, ToXContent { return status; } - public String getParentNode() { - return parentNode; - } - - public long getParentId() { - return parentId; + public TaskId getParentTaskId() { + return parentTaskId; } @Override @@ -129,7 +122,7 @@ public class TaskInfo implements Writeable, ToXContent { @Override public void writeTo(StreamOutput out) throws IOException 
{ node.writeTo(out); - out.writeLong(id); + out.writeLong(taskId.getId()); out.writeString(type); out.writeString(action); out.writeOptionalString(description); @@ -139,15 +132,13 @@ public class TaskInfo implements Writeable, ToXContent { } else { out.writeBoolean(false); } - out.writeOptionalString(parentNode); - out.writeLong(parentId); + parentTaskId.writeTo(out); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); builder.field("node", node.getId()); - builder.field("id", id); + builder.field("id", taskId.getId()); builder.field("type", type); builder.field("action", action); if (status != null) { @@ -156,11 +147,9 @@ public class TaskInfo implements Writeable, ToXContent { if (description != null) { builder.field("description", description); } - if (parentNode != null) { - builder.field("parent_node", parentNode); - builder.field("parent_id", parentId); + if (parentTaskId.isSet() == false) { + builder.field("parent_task_id", parentTaskId.toString()); } - builder.endObject(); return builder; } } diff --git a/core/src/main/java/org/elasticsearch/action/fieldstats/FieldStatsRequest.java b/core/src/main/java/org/elasticsearch/action/fieldstats/FieldStatsRequest.java index 09411b56e25..4404cdafecc 100644 --- a/core/src/main/java/org/elasticsearch/action/fieldstats/FieldStatsRequest.java +++ b/core/src/main/java/org/elasticsearch/action/fieldstats/FieldStatsRequest.java @@ -62,7 +62,7 @@ public class FieldStatsRequest extends BroadcastRequest { public void setIndexConstraints(IndexConstraint[] indexConstraints) { if (indexConstraints == null) { - throw new NullPointerException("specified index_contraints can't be null"); + throw new NullPointerException("specified index_constraints can't be null"); } this.indexConstraints = indexConstraints; } diff --git a/core/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java 
b/core/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 4eec61b3a63..07584772f66 100644 --- a/core/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -52,7 +52,6 @@ public class TransportSearchAction extends HandledTransportAction listener) { // optimize search type for cases where there is only one shard group to search on - if (optimizeSingleShard) { - try { - ClusterState clusterState = clusterService.state(); - String[] concreteIndices = indexNameExpressionResolver.concreteIndices(clusterState, searchRequest); - Map> routingMap = indexNameExpressionResolver.resolveSearchRouting(clusterState, searchRequest.routing(), searchRequest.indices()); - int shardCount = clusterService.operationRouting().searchShardsCount(clusterState, concreteIndices, routingMap); - if (shardCount == 1) { - // if we only have one group, then we always want Q_A_F, no need for DFS, and no need to do THEN since we hit one shard - searchRequest.searchType(QUERY_AND_FETCH); - } - } catch (IndexNotFoundException | IndexClosedException e) { - // ignore these failures, we will notify the search response if its really the case from the actual action - } catch (Exception e) { - logger.debug("failed to optimize search type, continue as normal", e); + try { + ClusterState clusterState = clusterService.state(); + String[] concreteIndices = indexNameExpressionResolver.concreteIndices(clusterState, searchRequest); + Map> routingMap = indexNameExpressionResolver.resolveSearchRouting(clusterState, searchRequest.routing(), searchRequest.indices()); + int shardCount = clusterService.operationRouting().searchShardsCount(clusterState, concreteIndices, routingMap); + if (shardCount == 1) { + // if we only have one group, then we always want Q_A_F, no need for DFS, and no need to do THEN since we hit one shard + searchRequest.searchType(QUERY_AND_FETCH); } + } catch 
(IndexNotFoundException | IndexClosedException e) { + // ignore these failures, we will notify the search response if its really the case from the actual action + } catch (Exception e) { + logger.debug("failed to optimize search type, continue as normal", e); } if (searchRequest.searchType() == DFS_QUERY_THEN_FETCH) { dfsQueryThenFetchAction.execute(searchRequest, listener); diff --git a/core/src/main/java/org/elasticsearch/action/support/ChildTaskActionRequest.java b/core/src/main/java/org/elasticsearch/action/support/ChildTaskActionRequest.java index 4e3800f7232..a1b749fcc17 100644 --- a/core/src/main/java/org/elasticsearch/action/support/ChildTaskActionRequest.java +++ b/core/src/main/java/org/elasticsearch/action/support/ChildTaskActionRequest.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; import java.io.IOException; @@ -31,40 +32,35 @@ import java.io.IOException; */ public abstract class ChildTaskActionRequest> extends ActionRequest { - private String parentTaskNode; - - private long parentTaskId; + private TaskId parentTaskId = TaskId.EMPTY_TASK_ID; protected ChildTaskActionRequest() { } public void setParentTask(String parentTaskNode, long parentTaskId) { - this.parentTaskNode = parentTaskNode; - this.parentTaskId = parentTaskId; + this.parentTaskId = new TaskId(parentTaskNode, parentTaskId); } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - parentTaskNode = in.readOptionalString(); - parentTaskId = in.readLong(); + parentTaskId = new TaskId(in); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeOptionalString(parentTaskNode); - out.writeLong(parentTaskId); + parentTaskId.writeTo(out); } @Override public final Task createTask(long id, String type, String 
action) { - return createTask(id, type, action, parentTaskNode, parentTaskId); + return createTask(id, type, action, parentTaskId); } - public Task createTask(long id, String type, String action, String parentTaskNode, long parentTaskId) { - return new Task(id, type, action, getDescription(), parentTaskNode, parentTaskId); + public Task createTask(long id, String type, String action, TaskId parentTaskId) { + return new Task(id, type, action, getDescription(), parentTaskId); } } diff --git a/core/src/main/java/org/elasticsearch/action/support/ChildTaskRequest.java b/core/src/main/java/org/elasticsearch/action/support/ChildTaskRequest.java index 361ba6013f9..2a84327a820 100644 --- a/core/src/main/java/org/elasticsearch/action/support/ChildTaskRequest.java +++ b/core/src/main/java/org/elasticsearch/action/support/ChildTaskRequest.java @@ -22,6 +22,7 @@ package org.elasticsearch.action.support; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.transport.TransportRequest; import java.io.IOException; @@ -31,38 +32,33 @@ import java.io.IOException; */ public class ChildTaskRequest extends TransportRequest { - private String parentTaskNode; - - private long parentTaskId; + private TaskId parentTaskId = TaskId.EMPTY_TASK_ID; protected ChildTaskRequest() { } public void setParentTask(String parentTaskNode, long parentTaskId) { - this.parentTaskNode = parentTaskNode; - this.parentTaskId = parentTaskId; + this.parentTaskId = new TaskId(parentTaskNode, parentTaskId); } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - parentTaskNode = in.readOptionalString(); - parentTaskId = in.readLong(); + parentTaskId = new TaskId(in); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - out.writeOptionalString(parentTaskNode); - 
out.writeLong(parentTaskId); + parentTaskId.writeTo(out); } @Override public final Task createTask(long id, String type, String action) { - return createTask(id, type, action, parentTaskNode, parentTaskId); + return createTask(id, type, action, parentTaskId); } - public Task createTask(long id, String type, String action, String parentTaskNode, long parentTaskId) { - return new Task(id, type, action, getDescription(), parentTaskNode, parentTaskId); + public Task createTask(long id, String type, String action, TaskId parentTaskId) { + return new Task(id, type, action, getDescription(), parentTaskId); } } diff --git a/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java b/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java index 337ded6fb56..6283e69a02e 100644 --- a/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java +++ b/core/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequest.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; import java.io.IOException; import java.util.concurrent.TimeUnit; @@ -186,8 +187,8 @@ public abstract class ReplicationRequest> extends private String[] actions = ALL_ACTIONS; - private String parentNode; + private TaskId parentTaskId = TaskId.EMPTY_TASK_ID; - private long parentTaskId = ALL_TASKS; - - private long taskId = ALL_TASKS; + private TaskId taskId = TaskId.EMPTY_TASK_ID; public BaseTasksRequest() { } @Override public ActionRequestValidationException validate() { - return null; - } - - /** - * Get information about tasks from nodes based on the nodes ids specified. - * If none are passed, information for all nodes will be returned. - */ - public BaseTasksRequest(String... 
nodesIds) { - this.nodesIds = nodesIds; + ActionRequestValidationException validationException = null; + if (taskId.isSet() == false && nodesIds.length > 0) { + validationException = addValidationError("task id cannot be used together with node ids", + validationException); + } + return validationException; } /** @@ -100,39 +98,26 @@ public class BaseTasksRequest> extends * * By default tasks with any ids are returned. */ - public long taskId() { + public TaskId taskId() { return taskId; } @SuppressWarnings("unchecked") - public final Request taskId(long taskId) { + public final Request taskId(TaskId taskId) { this.taskId = taskId; return (Request) this; } - /** - * Returns the parent node id that tasks should be filtered by - */ - public String parentNode() { - return parentNode; - } - - @SuppressWarnings("unchecked") - public Request parentNode(String parentNode) { - this.parentNode = parentNode; - return (Request) this; - } - /** * Returns the parent task id that tasks should be filtered by */ - public long parentTaskId() { + public TaskId parentTaskId() { return parentTaskId; } @SuppressWarnings("unchecked") - public Request parentTaskId(long parentTaskId) { + public Request parentTaskId(TaskId parentTaskId) { this.parentTaskId = parentTaskId; return (Request) this; } @@ -157,11 +142,10 @@ public class BaseTasksRequest> extends @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); + taskId = new TaskId(in); + parentTaskId = new TaskId(in); nodesIds = in.readStringArray(); - taskId = in.readLong(); actions = in.readStringArray(); - parentNode = in.readOptionalString(); - parentTaskId = in.readLong(); if (in.readBoolean()) { timeout = TimeValue.readTimeValue(in); } @@ -170,11 +154,10 @@ public class BaseTasksRequest> extends @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); + taskId.writeTo(out); + parentTaskId.writeTo(out); out.writeStringArrayNullable(nodesIds); - out.writeLong(taskId); 
out.writeStringArrayNullable(actions); - out.writeOptionalString(parentNode); - out.writeLong(parentTaskId); out.writeOptionalStreamable(timeout); } @@ -182,18 +165,13 @@ public class BaseTasksRequest> extends if (actions() != null && actions().length > 0 && Regex.simpleMatch(actions(), task.getAction()) == false) { return false; } - if (taskId() != ALL_TASKS) { - if(taskId() != task.getId()) { + if (taskId().isSet() == false) { + if(taskId().getId() != task.getId()) { return false; } } - if (parentNode() != null) { - if (parentNode().equals(task.getParentNode()) == false) { - return false; - } - } - if (parentTaskId() != ALL_TASKS) { - if (parentTaskId() != task.getParentId()) { + if (parentTaskId.isSet() == false) { + if (parentTaskId.equals(task.getParentTaskId()) == false) { return false; } } diff --git a/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java b/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java index 77915f7d0c9..54d48143577 100644 --- a/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java +++ b/core/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java @@ -124,13 +124,17 @@ public abstract class TransportTasksAction< } protected String[] resolveNodes(TasksRequest request, ClusterState clusterState) { - return clusterState.nodes().resolveNodesIds(request.nodesIds()); + if (request.taskId().isSet()) { + return clusterState.nodes().resolveNodesIds(request.nodesIds()); + } else { + return new String[]{request.taskId().getNodeId()}; + } } protected void processTasks(TasksRequest request, Consumer operation) { - if (request.taskId() != BaseTasksRequest.ALL_TASKS) { + if (request.taskId().isSet() == false) { // we are only checking one task, we can optimize it - Task task = taskManager.getTask(request.taskId()); + Task task = taskManager.getTask(request.taskId().getId()); if (task != null) { if (request.match(task)) { 
operation.accept((OperationTask) task); @@ -143,13 +147,14 @@ public abstract class TransportTasksAction< } else { for (Task task : taskManager.getTasks().values()) { if (request.match(task)) { - operation.accept((OperationTask)task); + operation.accept((OperationTask) task); } } } } - protected abstract TasksResponse newResponse(TasksRequest request, List tasks, List taskOperationFailures, List failedNodeExceptions); + protected abstract TasksResponse newResponse(TasksRequest request, List tasks, List + taskOperationFailures, List failedNodeExceptions); @SuppressWarnings("unchecked") protected TasksResponse newResponse(TasksRequest request, AtomicReferenceArray responses) { @@ -232,34 +237,36 @@ public abstract class TransportTasksAction< onFailure(idx, nodeId, new NoSuchNodeException(nodeId)); } else if (!clusterService.localNode().shouldConnectTo(node) && !clusterService.localNode().equals(node)) { // the check "!clusterService.localNode().equals(node)" is to maintain backward comp. where before - // we allowed to connect from "local" client node to itself, certain tests rely on it, if we remove it, we need to fix + // we allowed to connect from "local" client node to itself, certain tests rely on it, if we remove it, we + // need to fix // those (and they randomize the client node usage, so tricky to find when) onFailure(idx, nodeId, new NodeShouldNotConnectException(clusterService.localNode(), node)); } else { NodeTaskRequest nodeRequest = new NodeTaskRequest(request); nodeRequest.setParentTask(clusterService.localNode().id(), task.getId()); taskManager.registerChildTask(task, node.getId()); - transportService.sendRequest(node, transportNodeAction, nodeRequest, builder.build(), new BaseTransportResponseHandler() { - @Override - public NodeTasksResponse newInstance() { - return new NodeTasksResponse(); - } + transportService.sendRequest(node, transportNodeAction, nodeRequest, builder.build(), + new BaseTransportResponseHandler() { + @Override + public 
NodeTasksResponse newInstance() { + return new NodeTasksResponse(); + } - @Override - public void handleResponse(NodeTasksResponse response) { - onOperation(idx, response); - } + @Override + public void handleResponse(NodeTasksResponse response) { + onOperation(idx, response); + } - @Override - public void handleException(TransportException exp) { - onFailure(idx, node.id(), exp); - } + @Override + public void handleException(TransportException exp) { + onFailure(idx, node.id(), exp); + } - @Override - public String executor() { - return ThreadPool.Names.SAME; - } - }); + @Override + public String executor() { + return ThreadPool.Names.SAME; + } + }); } } catch (Throwable t) { onFailure(idx, nodeId, t); diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java b/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java index 0d95edadb17..ec4e5ba2421 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java @@ -29,10 +29,9 @@ import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.cli.CliTool; import org.elasticsearch.common.cli.Terminal; import org.elasticsearch.common.inject.CreationException; -import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.logging.ESLogger; +import org.elasticsearch.common.logging.LogConfigurator; import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.logging.log4j.LogConfigurator; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Security.java b/core/src/main/java/org/elasticsearch/bootstrap/Security.java index b9d2bfda24a..974be9aba85 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/Security.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Security.java @@ -98,7 +98,7 
@@ import java.util.Map; *

* When running tests you have to pass it to the test runner like this: *

- * mvn test -Dtests.jvm.argline="-Djava.security.debug=access,failure" ...
+ * gradle test -Dtests.jvm.argline="-Djava.security.debug=access,failure" ...
  * 
* See * Troubleshooting Security for information. diff --git a/core/src/main/java/org/elasticsearch/client/ClusterAdminClient.java b/core/src/main/java/org/elasticsearch/client/ClusterAdminClient.java index ffd36790cfa..d7c76906f91 100644 --- a/core/src/main/java/org/elasticsearch/client/ClusterAdminClient.java +++ b/core/src/main/java/org/elasticsearch/client/ClusterAdminClient.java @@ -272,7 +272,7 @@ public interface ClusterAdminClient extends ElasticsearchClient { * * @param request The nodes tasks request * @return The result future - * @see org.elasticsearch.client.Requests#listTasksRequest(String...) + * @see org.elasticsearch.client.Requests#listTasksRequest() */ ActionFuture listTasks(ListTasksRequest request); @@ -281,7 +281,7 @@ public interface ClusterAdminClient extends ElasticsearchClient { * * @param request The nodes tasks request * @param listener A listener to be notified with a result - * @see org.elasticsearch.client.Requests#listTasksRequest(String...) + * @see org.elasticsearch.client.Requests#listTasksRequest() */ void listTasks(ListTasksRequest request, ActionListener listener); @@ -295,7 +295,7 @@ public interface ClusterAdminClient extends ElasticsearchClient { * * @param request The nodes tasks request * @return The result future - * @see org.elasticsearch.client.Requests#cancelTasksRequest(String...) + * @see org.elasticsearch.client.Requests#cancelTasksRequest() */ ActionFuture cancelTasks(CancelTasksRequest request); @@ -304,7 +304,7 @@ public interface ClusterAdminClient extends ElasticsearchClient { * * @param request The nodes tasks request * @param listener A cancelener to be notified with a result - * @see org.elasticsearch.client.Requests#cancelTasksRequest(String...) 
+ * @see org.elasticsearch.client.Requests#cancelTasksRequest() */ void cancelTasks(CancelTasksRequest request, ActionListener listener); diff --git a/core/src/main/java/org/elasticsearch/client/Requests.java b/core/src/main/java/org/elasticsearch/client/Requests.java index c3dd77a3e44..3cf4f3dc6cb 100644 --- a/core/src/main/java/org/elasticsearch/client/Requests.java +++ b/core/src/main/java/org/elasticsearch/client/Requests.java @@ -419,23 +419,11 @@ public class Requests { /** * Creates a nodes tasks request against one or more nodes. Pass null or an empty array for all nodes. * - * @param nodesIds The nodes ids to get the tasks for - * @return The nodes tasks request - * @see org.elasticsearch.client.ClusterAdminClient#listTasks(ListTasksRequest) - */ - public static ListTasksRequest listTasksRequest(String... nodesIds) { - return new ListTasksRequest(nodesIds); - } - - /** - * Creates a nodes tasks request against one or more nodes. Pass null or an empty array for all nodes. - * - * @param nodesIds The nodes ids to cancel the tasks on * @return The nodes tasks request * @see org.elasticsearch.client.ClusterAdminClient#cancelTasks(CancelTasksRequest) */ - public static CancelTasksRequest cancelTasksRequest(String... 
nodesIds) { - return new CancelTasksRequest(nodesIds); + public static CancelTasksRequest cancelTasksRequest() { + return new CancelTasksRequest(); } /** diff --git a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java index 79bc7a142b1..87a8ca53efd 100644 --- a/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java +++ b/core/src/main/java/org/elasticsearch/client/transport/TransportClient.java @@ -39,7 +39,6 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.indices.breaker.CircuitBreakerModule; @@ -155,7 +154,10 @@ public class TransportClient extends AbstractClient { pluginsService.processModules(modules); Injector injector = modules.createInjector(); - injector.getInstance(TransportService.class).start(); + final TransportService transportService = injector.getInstance(TransportService.class); + transportService.start(); + transportService.acceptIncomingRequests(); + TransportClient transportClient = new TransportClient(injector); success = true; return transportClient; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java b/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java index 5e2a560a945..bda0a24c9a4 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/IndexShardRoutingTable.java @@ -366,7 +366,7 @@ public class IndexShardRoutingTable implements Iterable { } } if (ordered.isEmpty()) { - throw new 
IllegalArgumentException("No data node with critera [" + nodeAttribute + "] found"); + throw new IllegalArgumentException("No data node with criteria [" + nodeAttribute + "] found"); } return new PlainShardIterator(shardId, ordered); } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java index c1a5f3ff208..4f2f9d06097 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java @@ -458,7 +458,7 @@ public class RoutingNodes implements Iterable { */ public void started(ShardRouting shard) { ensureMutable(); - assert !shard.active() : "expected an intializing shard " + shard; + assert !shard.active() : "expected an initializing shard " + shard; if (shard.relocatingNodeId() == null) { // if this is not a target shard for relocation, we need to update statistics inactiveShardCount--; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index e12020cfa74..cd75f897719 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -715,7 +715,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards } } if (logger.isTraceEnabled()) { - logger.trace("No eligable node found to assign shard [{}] decision [{}]", shard, decision.type()); + logger.trace("No eligible node found to assign shard [{}] decision [{}]", shard, decision.type()); } } else if (logger.isTraceEnabled()) { logger.trace("No Node found to assign shard [{}]", shard); diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java 
b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java index 7298939d015..ce459662009 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamInput.java @@ -72,16 +72,22 @@ import java.util.function.Supplier; import static org.elasticsearch.ElasticsearchException.readException; import static org.elasticsearch.ElasticsearchException.readStackTrace; +/** + * A stream from this node to another node. Technically, it can also be streamed to a byte array but that is mostly for testing. + */ public abstract class StreamInput extends InputStream { - private Version version = Version.CURRENT; - protected StreamInput() { } - + /** + * The version of the node on the other side of this stream. + */ public Version getVersion() { return this.version; } + /** + * Set the version of the node on the other side of this stream. + */ public void setVersion(Version version) { this.version = version; } diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java index 84d8cef1e81..95ec0fec292 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamOutput.java @@ -63,19 +63,24 @@ import java.util.List; import java.util.Map; /** - * + * A stream from another node to this node. Technically, it can also be streamed from a byte array but that is mostly for testing. */ public abstract class StreamOutput extends OutputStream { private Version version = Version.CURRENT; + /** + * The version of the node on the other side of this stream. + */ public Version getVersion() { return this.version; } - public StreamOutput setVersion(Version version) { + /** + * Set the version of the node on the other side of this stream. 
+ */ + public void setVersion(Version version) { this.version = version; - return this; } public long position() throws IOException { diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/Streamable.java b/core/src/main/java/org/elasticsearch/common/io/stream/Streamable.java index f2b77113e14..4added7df31 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/Streamable.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/Streamable.java @@ -22,11 +22,26 @@ package org.elasticsearch.common.io.stream; import java.io.IOException; /** + * Implementers can be written to a {@linkplain StreamOutput} and read from a {@linkplain StreamInput}. This allows them to be "thrown + * across the wire" using Elasticsearch's internal protocol. If the implementer also implements equals and hashCode then a copy made by + * serializing and deserializing must be equal and have the same hashCode. It isn't required that such a copy be entirely unchanged. For + * example, {@link org.elasticsearch.common.unit.TimeValue} converts the time to nanoseconds for serialization. * + * Prefer implementing {@link Writeable} over implementing this interface where possible. Lots of code depends on this interface so this + * isn't always possible. + * + * Implementers of this interface almost always declare a no arg constructor that is exclusively used for creating "empty" objects on which + * you then call {@link #readFrom(StreamInput)}. Because {@linkplain #readFrom(StreamInput)} isn't part of the constructor the fields + * on implementers cannot be final. It is these reasons that this interface has fallen out of favor compared to {@linkplain Writeable}. */ public interface Streamable { - + /** + * Set this object's fields from a {@linkplain StreamInput}. + */ void readFrom(StreamInput in) throws IOException; + /** + * Write this object's fields to a {@linkplain StreamOutput}. 
+ */ void writeTo(StreamOutput out) throws IOException; } diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/StreamableReader.java b/core/src/main/java/org/elasticsearch/common/io/stream/StreamableReader.java index 28e2175f4ce..6bb1c5653f3 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/StreamableReader.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/StreamableReader.java @@ -20,11 +20,17 @@ package org.elasticsearch.common.io.stream; import java.io.IOException; +/** + * Implementers can be read from {@linkplain StreamInput} by calling their {@link #readFrom(StreamInput)} method. + * + * It is common for implementers of this interface to declare a public static final instance of themselves named PROTOTYPE so + * users can call {@linkplain #readFrom(StreamInput)} on it. It is also fairly typical for readFrom to be implemented as a method that just + * calls a constructor that takes {@linkplain StreamInput} as a parameter. This allows the fields in the implementer to be + * final. + */ public interface StreamableReader { /** - * Reads a copy of an object with the same type form the stream input - * - * The caller object remains unchanged. + * Reads an object of this type from the provided {@linkplain StreamInput}. The receiving instance remains unchanged. */ T readFrom(StreamInput in) throws IOException; } diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/Writeable.java b/core/src/main/java/org/elasticsearch/common/io/stream/Writeable.java index 9025315dc43..9ff3de736c5 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/Writeable.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/Writeable.java @@ -21,10 +21,20 @@ package org.elasticsearch.common.io.stream; import java.io.IOException; +/** + * Implementers can be written to a {@linkplain StreamOutput} and read from a {@linkplain StreamInput}. 
This allows them to be "thrown + * across the wire" using Elasticsearch's internal protocol. If the implementer also implements equals and hashCode then a copy made by + * serializing and deserializing must be equal and have the same hashCode. It isn't required that such a copy be entirely unchanged. For + * example, {@link org.elasticsearch.common.unit.TimeValue} converts the time to nanoseconds for serialization. + * {@linkplain org.elasticsearch.common.unit.TimeValue} actually implements {@linkplain Streamable} not {@linkplain Writeable} but it has + * the same contract. + * + * Prefer implementing this interface over implementing {@link Streamable} where possible. Lots of code depends on {@linkplain Streamable} + * so this isn't always possible. + */ public interface Writeable extends StreamableReader { - /** - * Writes the current object into the output stream out + * Write this into the {@linkplain StreamOutput}. */ void writeTo(StreamOutput out) throws IOException; } diff --git a/core/src/main/java/org/elasticsearch/common/logging/jdk/JdkESLoggerFactory.java b/core/src/main/java/org/elasticsearch/common/io/stream/package-info.java similarity index 60% rename from core/src/main/java/org/elasticsearch/common/logging/jdk/JdkESLoggerFactory.java rename to core/src/main/java/org/elasticsearch/common/io/stream/package-info.java index 8f3f0f564c0..6f84f31785c 100644 --- a/core/src/main/java/org/elasticsearch/common/logging/jdk/JdkESLoggerFactory.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/package-info.java @@ -17,24 +17,7 @@ * under the License. */ -package org.elasticsearch.common.logging.jdk; - -import org.elasticsearch.common.logging.ESLogger; -import org.elasticsearch.common.logging.ESLoggerFactory; - /** - * + * Classes for streaming objects from one Elasticsearch node to another over its binary internode protocol. 
*/ -public class JdkESLoggerFactory extends ESLoggerFactory { - - @Override - protected ESLogger rootLogger() { - return getLogger(""); - } - - @Override - protected ESLogger newInstance(String prefix, String name) { - final java.util.logging.Logger logger = java.util.logging.Logger.getLogger(name); - return new JdkESLogger(prefix, logger); - } -} +package org.elasticsearch.common.io.stream; \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/common/logging/log4j/ConsoleAppender.java b/core/src/main/java/org/elasticsearch/common/logging/ConsoleAppender.java similarity index 98% rename from core/src/main/java/org/elasticsearch/common/logging/log4j/ConsoleAppender.java rename to core/src/main/java/org/elasticsearch/common/logging/ConsoleAppender.java index 30c3aa91f5d..7c33389974f 100644 --- a/core/src/main/java/org/elasticsearch/common/logging/log4j/ConsoleAppender.java +++ b/core/src/main/java/org/elasticsearch/common/logging/ConsoleAppender.java @@ -17,13 +17,12 @@ * under the License. */ -package org.elasticsearch.common.logging.log4j; +package org.elasticsearch.common.logging; import org.apache.log4j.Layout; import org.apache.log4j.WriterAppender; import org.apache.log4j.helpers.LogLog; import org.elasticsearch.common.SuppressForbidden; -import org.elasticsearch.common.logging.Loggers; import java.io.IOException; import java.io.OutputStream; diff --git a/core/src/main/java/org/elasticsearch/common/logging/ESLogger.java b/core/src/main/java/org/elasticsearch/common/logging/ESLogger.java index 06cce146b32..597619e61b4 100644 --- a/core/src/main/java/org/elasticsearch/common/logging/ESLogger.java +++ b/core/src/main/java/org/elasticsearch/common/logging/ESLogger.java @@ -19,104 +19,188 @@ package org.elasticsearch.common.logging; +import org.apache.log4j.Level; +import org.apache.log4j.Logger; + +import static org.elasticsearch.common.logging.LoggerMessageFormat.format; + /** - * Contract for all elasticsearch loggers. 
+ * Elasticsearch's logger wrapper. */ -public interface ESLogger { +public class ESLogger { + private static final String FQCN = ESLogger.class.getName(); - String getPrefix(); + private final String prefix; + private final Logger logger; - String getName(); + public ESLogger(String prefix, Logger logger) { + this.prefix = prefix; + this.logger = logger; + } /** - * Allows to set the logger level - * If the new level is null, the logger will inherit its level - * from its nearest ancestor with a specific (non-null) level value. - * @param level the new level + * The prefix of the log. */ - void setLevel(String level); + public String getPrefix() { + return this.prefix; + } /** - * Returns the current logger level - * If the level is null, it means that the logger inherits its level - * from its nearest ancestor with a specific (non-null) level value. - * @return the logger level + * Fetch the underlying logger so we can look at it. Only exists for testing. */ - String getLevel(); + Logger getLogger() { + return logger; + } /** - * Returns {@code true} if a TRACE level message is logged. + * Set the level of the logger. If the new level is null, the logger will inherit its level from its nearest ancestor with a non-null + * level. */ - boolean isTraceEnabled(); + public void setLevel(String level) { + if (level == null) { + logger.setLevel(null); + } else if ("error".equalsIgnoreCase(level)) { + logger.setLevel(Level.ERROR); + } else if ("warn".equalsIgnoreCase(level)) { + logger.setLevel(Level.WARN); + } else if ("info".equalsIgnoreCase(level)) { + logger.setLevel(Level.INFO); + } else if ("debug".equalsIgnoreCase(level)) { + logger.setLevel(Level.DEBUG); + } else if ("trace".equalsIgnoreCase(level)) { + logger.setLevel(Level.TRACE); + } + } /** - * Returns {@code true} if a DEBUG level message is logged. + * The level of this logger. If null then the logger is inheriting its level from its nearest ancestor with a non-null level. 
*/ - boolean isDebugEnabled(); + public String getLevel() { + if (logger.getLevel() == null) { + return null; + } + return logger.getLevel().toString(); + } /** - * Returns {@code true} if an INFO level message is logged. + * The name of this logger. */ - boolean isInfoEnabled(); + public String getName() { + return logger.getName(); + } /** - * Returns {@code true} if a WARN level message is logged. + * Returns {@code true} if a TRACE level message should be logged. */ - boolean isWarnEnabled(); + public boolean isTraceEnabled() { + return logger.isTraceEnabled(); + } /** - * Returns {@code true} if an ERROR level message is logged. + * Returns {@code true} if a DEBUG level message should be logged. */ - boolean isErrorEnabled(); + public boolean isDebugEnabled() { + return logger.isDebugEnabled(); + } + + /** + * Returns {@code true} if an INFO level message should be logged. + */ + public boolean isInfoEnabled() { + return logger.isInfoEnabled(); + } + + /** + * Returns {@code true} if a WARN level message should be logged. + */ + public boolean isWarnEnabled() { + return logger.isEnabledFor(Level.WARN); + } + + /** + * Returns {@code true} if an ERROR level message should be logged. + */ + public boolean isErrorEnabled() { + return logger.isEnabledFor(Level.ERROR); + } + + /** + * Logs a TRACE level message. + */ + public void trace(String msg, Object... params) { + trace(msg, null, params); + } + + /** + * Logs a TRACE level message with an exception. + */ + public void trace(String msg, Throwable cause, Object... params) { + if (isTraceEnabled()) { + logger.log(FQCN, Level.TRACE, format(prefix, msg, params), cause); + } + } /** * Logs a DEBUG level message. */ - void trace(String msg, Object... params); + public void debug(String msg, Object... params) { + debug(msg, null, params); + } /** - * Logs a DEBUG level message. + * Logs a DEBUG level message with an exception. */ - void trace(String msg, Throwable cause, Object... 
params); + public void debug(String msg, Throwable cause, Object... params) { + if (isDebugEnabled()) { + logger.log(FQCN, Level.DEBUG, format(prefix, msg, params), cause); + } + } /** - * Logs a DEBUG level message. + * Logs an INFO level message. */ - void debug(String msg, Object... params); + public void info(String msg, Object... params) { + info(msg, null, params); + } /** - * Logs a DEBUG level message. + * Logs an INFO level message with an exception. */ - void debug(String msg, Throwable cause, Object... params); - - /** - * Logs an INFO level message. - */ - void info(String msg, Object... params); - - /** - * Logs an INFO level message. - */ - void info(String msg, Throwable cause, Object... params); + public void info(String msg, Throwable cause, Object... params) { + if (isInfoEnabled()) { + logger.log(FQCN, Level.INFO, format(prefix, msg, params), cause); + } + } /** * Logs a WARN level message. */ - void warn(String msg, Object... params); + public void warn(String msg, Object... params) { + warn(msg, null, params); + } /** - * Logs a WARN level message. + * Logs a WARN level message with an exception. */ - void warn(String msg, Throwable cause, Object... params); + public void warn(String msg, Throwable cause, Object... params) { + if (isWarnEnabled()) { + logger.log(FQCN, Level.WARN, format(prefix, msg, params), cause); + } + } /** - * Logs an ERROR level message. + * Logs an ERROR level message. */ - void error(String msg, Object... params); + public void error(String msg, Object... params) { + error(msg, null, params); + } /** - * Logs an ERROR level message. + * Logs an ERROR level message with an exception. */ - void error(String msg, Throwable cause, Object... 
params) { + if (isErrorEnabled()) { + logger.log(FQCN, Level.ERROR, format(prefix, msg, params), cause); + } + } } diff --git a/core/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java b/core/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java index 4fdde3db895..1cd3405bde6 100644 --- a/core/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java +++ b/core/src/main/java/org/elasticsearch/common/logging/ESLoggerFactory.java @@ -19,62 +19,29 @@ package org.elasticsearch.common.logging; -import org.elasticsearch.common.logging.jdk.JdkESLoggerFactory; -import org.elasticsearch.common.logging.log4j.Log4jESLoggerFactory; -import org.elasticsearch.common.logging.slf4j.Slf4jESLoggerFactory; -import org.elasticsearch.common.settings.AbstractScopedSettings; +import org.apache.log4j.Logger; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Settings; import java.util.Locale; -import java.util.Map; -import java.util.function.Consumer; -import java.util.regex.Pattern; /** * Factory to get {@link ESLogger}s */ public abstract class ESLoggerFactory { - public static final Setting LOG_DEFAULT_LEVEL_SETTING = new Setting<>("logger.level", LogLevel.INFO.name(), LogLevel::parse, false, Setting.Scope.CLUSTER); - public static final Setting LOG_LEVEL_SETTING = Setting.dynamicKeySetting("logger.", LogLevel.INFO.name(), LogLevel::parse, true, Setting.Scope.CLUSTER); - - private static volatile ESLoggerFactory defaultFactory = new JdkESLoggerFactory(); - - static { - try { - Class loggerClazz = Class.forName("org.apache.log4j.Logger"); - // below will throw a NoSuchMethod failure with using slf4j log4j bridge - loggerClazz.getMethod("setLevel", Class.forName("org.apache.log4j.Level")); - defaultFactory = new Log4jESLoggerFactory(); - } catch (Throwable e) { - // no log4j - try { - Class.forName("org.slf4j.Logger"); - defaultFactory = new Slf4jESLoggerFactory(); - } catch (Throwable e1) { - // no slf4j - 
} - } - } - - /** - * Changes the default factory. - */ - public static void setDefaultFactory(ESLoggerFactory defaultFactory) { - if (defaultFactory == null) { - throw new NullPointerException("defaultFactory"); - } - ESLoggerFactory.defaultFactory = defaultFactory; - } - + public static final Setting LOG_DEFAULT_LEVEL_SETTING = + new Setting<>("logger.level", LogLevel.INFO.name(), LogLevel::parse, false, Setting.Scope.CLUSTER); + public static final Setting LOG_LEVEL_SETTING = + Setting.dynamicKeySetting("logger.", LogLevel.INFO.name(), LogLevel::parse, true, Setting.Scope.CLUSTER); public static ESLogger getLogger(String prefix, String name) { - return defaultFactory.newInstance(prefix == null ? null : prefix.intern(), name.intern()); + prefix = prefix == null ? null : prefix.intern(); + name = name.intern(); + return new ESLogger(prefix, Logger.getLogger(name)); } public static ESLogger getLogger(String name) { - return defaultFactory.newInstance(name.intern()); + return getLogger(null, name); } public static DeprecationLogger getDeprecationLogger(String name) { @@ -86,17 +53,13 @@ public abstract class ESLoggerFactory { } public static ESLogger getRootLogger() { - return defaultFactory.rootLogger(); + return new ESLogger(null, Logger.getRootLogger()); } - public ESLogger newInstance(String name) { - return newInstance(null, name); + private ESLoggerFactory() { + // Utility class can't be built. 
} - protected abstract ESLogger rootLogger(); - - protected abstract ESLogger newInstance(String prefix, String name); - public enum LogLevel { WARN, TRACE, INFO, DEBUG, ERROR; public static LogLevel parse(String level) { diff --git a/core/src/main/java/org/elasticsearch/common/logging/log4j/LogConfigurator.java b/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java similarity index 93% rename from core/src/main/java/org/elasticsearch/common/logging/log4j/LogConfigurator.java rename to core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java index e0d5f15630a..28feca13c02 100644 --- a/core/src/main/java/org/elasticsearch/common/logging/log4j/LogConfigurator.java +++ b/core/src/main/java/org/elasticsearch/common/logging/LogConfigurator.java @@ -17,7 +17,7 @@ * under the License. */ -package org.elasticsearch.common.logging.log4j; +package org.elasticsearch.common.logging; import org.apache.log4j.PropertyConfigurator; import org.elasticsearch.ElasticsearchException; @@ -39,13 +39,14 @@ import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; +import java.util.Set; import static java.util.Collections.unmodifiableMap; import static org.elasticsearch.common.Strings.cleanPath; import static org.elasticsearch.common.settings.Settings.settingsBuilder; /** - * + * Configures log4j with a special set of replacements. 
*/ public class LogConfigurator { @@ -54,10 +55,12 @@ public class LogConfigurator { private static final Map REPLACEMENTS; static { Map replacements = new HashMap<>(); - replacements.put("console", "org.elasticsearch.common.logging.log4j.ConsoleAppender"); + // Appenders replacements.put("async", "org.apache.log4j.AsyncAppender"); + replacements.put("console", ConsoleAppender.class.getName()); replacements.put("dailyRollingFile", "org.apache.log4j.DailyRollingFileAppender"); replacements.put("externallyRolledFile", "org.apache.log4j.ExternallyRolledFileAppender"); + replacements.put("extrasRollingFile", "org.apache.log4j.rolling.RollingFileAppender"); replacements.put("file", "org.apache.log4j.FileAppender"); replacements.put("jdbc", "org.apache.log4j.jdbc.JDBCAppender"); replacements.put("jms", "org.apache.log4j.net.JMSAppender"); @@ -65,17 +68,18 @@ public class LogConfigurator { replacements.put("ntevent", "org.apache.log4j.nt.NTEventLogAppender"); replacements.put("null", "org.apache.log4j.NullAppender"); replacements.put("rollingFile", "org.apache.log4j.RollingFileAppender"); - replacements.put("extrasRollingFile", "org.apache.log4j.rolling.RollingFileAppender"); replacements.put("smtp", "org.apache.log4j.net.SMTPAppender"); replacements.put("socket", "org.apache.log4j.net.SocketAppender"); replacements.put("socketHub", "org.apache.log4j.net.SocketHubAppender"); replacements.put("syslog", "org.apache.log4j.net.SyslogAppender"); replacements.put("telnet", "org.apache.log4j.net.TelnetAppender"); - replacements.put("terminal", "org.elasticsearch.common.logging.log4j.TerminalAppender"); - // policies + replacements.put("terminal", TerminalAppender.class.getName()); + + // Policies replacements.put("timeBased", "org.apache.log4j.rolling.TimeBasedRollingPolicy"); replacements.put("sizeBased", "org.apache.log4j.rolling.SizeBasedTriggeringPolicy"); - // layouts + + // Layouts replacements.put("simple", "org.apache.log4j.SimpleLayout"); replacements.put("html", 
"org.apache.log4j.HTMLLayout"); replacements.put("pattern", "org.apache.log4j.PatternLayout"); @@ -141,7 +145,8 @@ public class LogConfigurator { static void resolveConfig(Environment env, final Settings.Builder settingsBuilder) { try { - Files.walkFileTree(env.configFile(), EnumSet.of(FileVisitOption.FOLLOW_LINKS), Integer.MAX_VALUE, new SimpleFileVisitor() { + Set options = EnumSet.of(FileVisitOption.FOLLOW_LINKS); + Files.walkFileTree(env.configFile(), options, Integer.MAX_VALUE, new SimpleFileVisitor() { @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { String fileName = file.getFileName().toString(); diff --git a/core/src/main/java/org/elasticsearch/common/logging/support/LoggerMessageFormat.java b/core/src/main/java/org/elasticsearch/common/logging/LoggerMessageFormat.java similarity index 94% rename from core/src/main/java/org/elasticsearch/common/logging/support/LoggerMessageFormat.java rename to core/src/main/java/org/elasticsearch/common/logging/LoggerMessageFormat.java index 8ec9f61df00..f1e135d645c 100644 --- a/core/src/main/java/org/elasticsearch/common/logging/support/LoggerMessageFormat.java +++ b/core/src/main/java/org/elasticsearch/common/logging/LoggerMessageFormat.java @@ -17,13 +17,13 @@ * under the License. */ -package org.elasticsearch.common.logging.support; +package org.elasticsearch.common.logging; -import java.util.HashMap; -import java.util.Map; +import java.util.HashSet; +import java.util.Set; /** - * + * Format string for Elasticsearch log messages. 
*/ public class LoggerMessageFormat { @@ -79,13 +79,13 @@ public class LoggerMessageFormat { // itself escaped: "abc x:\\{}" // we have to consume one backward slash sbuf.append(messagePattern.substring(i, j - 1)); - deeplyAppendParameter(sbuf, argArray[L], new HashMap()); + deeplyAppendParameter(sbuf, argArray[L], new HashSet()); i = j + 2; } } else { // normal case sbuf.append(messagePattern.substring(i, j)); - deeplyAppendParameter(sbuf, argArray[L], new HashMap()); + deeplyAppendParameter(sbuf, argArray[L], new HashSet()); i = j + 2; } } @@ -117,7 +117,7 @@ public class LoggerMessageFormat { } } - private static void deeplyAppendParameter(StringBuilder sbuf, Object o, Map seenMap) { + private static void deeplyAppendParameter(StringBuilder sbuf, Object o, Set seen) { if (o == null) { sbuf.append("null"); return; @@ -144,7 +144,7 @@ public class LoggerMessageFormat { } else if (o instanceof double[]) { doubleArrayAppend(sbuf, (double[]) o); } else { - objectArrayAppend(sbuf, (Object[]) o, seenMap); + objectArrayAppend(sbuf, (Object[]) o, seen); } } } @@ -159,18 +159,18 @@ public class LoggerMessageFormat { } - private static void objectArrayAppend(StringBuilder sbuf, Object[] a, Map seenMap) { + private static void objectArrayAppend(StringBuilder sbuf, Object[] a, Set seen) { sbuf.append('['); - if (!seenMap.containsKey(a)) { - seenMap.put(a, null); + if (!seen.contains(a)) { + seen.add(a); final int len = a.length; for (int i = 0; i < len; i++) { - deeplyAppendParameter(sbuf, a[i], seenMap); + deeplyAppendParameter(sbuf, a[i], seen); if (i != len - 1) sbuf.append(", "); } // allow repeats in siblings - seenMap.remove(a); + seen.remove(a); } else { sbuf.append("..."); } diff --git a/core/src/main/java/org/elasticsearch/common/logging/Loggers.java b/core/src/main/java/org/elasticsearch/common/logging/Loggers.java index 6aecca2846c..a195ab450b5 100644 --- a/core/src/main/java/org/elasticsearch/common/logging/Loggers.java +++ 
b/core/src/main/java/org/elasticsearch/common/logging/Loggers.java @@ -35,8 +35,6 @@ import static org.elasticsearch.common.util.CollectionUtils.asArrayList; /** * A set of utilities around Logging. - * - * */ public class Loggers { @@ -58,20 +56,24 @@ public class Loggers { return consoleLoggingEnabled; } - public static ESLogger getLogger(Class clazz, Settings settings, ShardId shardId, String... prefixes) { + public static ESLogger getLogger(Class clazz, Settings settings, ShardId shardId, String... prefixes) { return getLogger(clazz, settings, shardId.getIndex(), asArrayList(Integer.toString(shardId.id()), prefixes).toArray(new String[0])); } - /** Just like {@link #getLogger(Class, org.elasticsearch.common.settings.Settings,ShardId,String...)} but String loggerName instead of Class. */ + /** + * Just like {@link #getLogger(Class, org.elasticsearch.common.settings.Settings,ShardId,String...)} but String loggerName instead of + * Class. + */ public static ESLogger getLogger(String loggerName, Settings settings, ShardId shardId, String... prefixes) { - return getLogger(loggerName, settings, asArrayList(shardId.getIndexName(), Integer.toString(shardId.id()), prefixes).toArray(new String[0])); + return getLogger(loggerName, settings, + asArrayList(shardId.getIndexName(), Integer.toString(shardId.id()), prefixes).toArray(new String[0])); } - public static ESLogger getLogger(Class clazz, Settings settings, Index index, String... prefixes) { + public static ESLogger getLogger(Class clazz, Settings settings, Index index, String... prefixes) { return getLogger(clazz, settings, asArrayList(SPACE, index.getName(), prefixes).toArray(new String[0])); } - public static ESLogger getLogger(Class clazz, Settings settings, String... prefixes) { + public static ESLogger getLogger(Class clazz, Settings settings, String... 
prefixes) { return getLogger(buildClassLoggerName(clazz), settings, prefixes); } @@ -117,11 +119,11 @@ public class Loggers { return ESLoggerFactory.getLogger(getLoggerName(s)); } - public static ESLogger getLogger(Class clazz) { + public static ESLogger getLogger(Class clazz) { return ESLoggerFactory.getLogger(getLoggerName(buildClassLoggerName(clazz))); } - public static ESLogger getLogger(Class clazz, String... prefixes) { + public static ESLogger getLogger(Class clazz, String... prefixes) { return getLogger(buildClassLoggerName(clazz), prefixes); } @@ -146,7 +148,7 @@ public class Loggers { return ESLoggerFactory.getLogger(prefix, getLoggerName(name)); } - private static String buildClassLoggerName(Class clazz) { + private static String buildClassLoggerName(Class clazz) { String name = clazz.getName(); if (name.startsWith("org.elasticsearch.")) { name = Classes.getPackageName(clazz); diff --git a/core/src/main/java/org/elasticsearch/common/logging/log4j/TerminalAppender.java b/core/src/main/java/org/elasticsearch/common/logging/TerminalAppender.java similarity index 96% rename from core/src/main/java/org/elasticsearch/common/logging/log4j/TerminalAppender.java rename to core/src/main/java/org/elasticsearch/common/logging/TerminalAppender.java index 6e626060542..7031a62a999 100644 --- a/core/src/main/java/org/elasticsearch/common/logging/log4j/TerminalAppender.java +++ b/core/src/main/java/org/elasticsearch/common/logging/TerminalAppender.java @@ -18,7 +18,7 @@ */ -package org.elasticsearch.common.logging.log4j; +package org.elasticsearch.common.logging; import org.apache.log4j.AppenderSkeleton; import org.apache.log4j.spi.LoggingEvent; diff --git a/core/src/main/java/org/elasticsearch/common/logging/jdk/ESLogRecord.java b/core/src/main/java/org/elasticsearch/common/logging/jdk/ESLogRecord.java deleted file mode 100644 index c462262e9e9..00000000000 --- a/core/src/main/java/org/elasticsearch/common/logging/jdk/ESLogRecord.java +++ /dev/null @@ -1,108 +0,0 @@ -/* 
- * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.logging.jdk; - -import org.elasticsearch.common.logging.support.AbstractESLogger; - -import java.util.logging.Level; -import java.util.logging.LogRecord; - -/** - * A {@link LogRecord} which is used in conjunction with {@link JdkESLogger} - * with the ability to provide the class name, method name and line number - * information of the code calling the logger - */ -public class ESLogRecord extends LogRecord { - private static final String FQCN = AbstractESLogger.class.getName(); - private String sourceClassName; - private String sourceMethodName; - private transient boolean needToInferCaller; - - public ESLogRecord(Level level, String msg) { - super(level, msg); - needToInferCaller = true; - } - - @Override - public String getSourceClassName() { - if (needToInferCaller) { - inferCaller(); - } - return sourceClassName; - } - - @Override - public void setSourceClassName(String sourceClassName) { - this.sourceClassName = sourceClassName; - needToInferCaller = false; - } - - @Override - public String getSourceMethodName() { - if (needToInferCaller) { - inferCaller(); - } - return sourceMethodName; - } - - @Override - public void 
setSourceMethodName(String sourceMethodName) { - this.sourceMethodName = sourceMethodName; - needToInferCaller = false; - } - - /** - * Determines the source information for the caller of the logger (class - * name, method name, and line number) - */ - private void inferCaller() { - needToInferCaller = false; - Throwable throwable = new Throwable(); - - boolean lookingForLogger = true; - for (final StackTraceElement frame : throwable.getStackTrace()) { - String cname = frame.getClassName(); - boolean isLoggerImpl = isLoggerImplFrame(cname); - if (lookingForLogger) { - // Skip all frames until we have found the first logger frame. - if (isLoggerImpl) { - lookingForLogger = false; - } - } else { - if (!isLoggerImpl) { - // skip reflection call - if (!cname.startsWith("java.lang.reflect.") && !cname.startsWith("sun.reflect.")) { - // We've found the relevant frame. - setSourceClassName(cname); - setSourceMethodName(frame.getMethodName()); - return; - } - } - } - } - // We haven't found a suitable frame, so just punt. This is - // OK as we are only committed to making a "best effort" here. - } - - private boolean isLoggerImplFrame(String cname) { - // the log record could be created for a platform logger - return cname.equals(FQCN); - } -} diff --git a/core/src/main/java/org/elasticsearch/common/logging/jdk/JdkESLogger.java b/core/src/main/java/org/elasticsearch/common/logging/jdk/JdkESLogger.java deleted file mode 100644 index 2db16983e1a..00000000000 --- a/core/src/main/java/org/elasticsearch/common/logging/jdk/JdkESLogger.java +++ /dev/null @@ -1,163 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.logging.jdk; - -import org.elasticsearch.common.logging.support.AbstractESLogger; - -import java.util.logging.Level; -import java.util.logging.LogRecord; -import java.util.logging.Logger; - -/** - * - */ -public class JdkESLogger extends AbstractESLogger { - - private final Logger logger; - - public JdkESLogger(String prefix, Logger logger) { - super(prefix); - this.logger = logger; - } - - @Override - public void setLevel(String level) { - if (level == null) { - logger.setLevel(null); - } else if ("error".equalsIgnoreCase(level)) { - logger.setLevel(Level.SEVERE); - } else if ("warn".equalsIgnoreCase(level)) { - logger.setLevel(Level.WARNING); - } else if ("info".equalsIgnoreCase(level)) { - logger.setLevel(Level.INFO); - } else if ("debug".equalsIgnoreCase(level)) { - logger.setLevel(Level.FINE); - } else if ("trace".equalsIgnoreCase(level)) { - logger.setLevel(Level.FINEST); - } - } - - @Override - public String getLevel() { - if (logger.getLevel() == null) { - return null; - } - return logger.getLevel().toString(); - } - - @Override - public String getName() { - return logger.getName(); - } - - @Override - public boolean isTraceEnabled() { - return logger.isLoggable(Level.FINEST); - } - - @Override - public boolean isDebugEnabled() { - return logger.isLoggable(Level.FINE); - } - - @Override - public boolean isInfoEnabled() { - return logger.isLoggable(Level.INFO); - } - - @Override - public boolean isWarnEnabled() { - return logger.isLoggable(Level.WARNING); - } - - @Override - public boolean 
isErrorEnabled() { - return logger.isLoggable(Level.SEVERE); - } - - @Override - protected void internalTrace(String msg) { - LogRecord record = new ESLogRecord(Level.FINEST, msg); - logger.log(record); - } - - @Override - protected void internalTrace(String msg, Throwable cause) { - LogRecord record = new ESLogRecord(Level.FINEST, msg); - record.setThrown(cause); - logger.log(record); - } - - @Override - protected void internalDebug(String msg) { - LogRecord record = new ESLogRecord(Level.FINE, msg); - logger.log(record); - } - - @Override - protected void internalDebug(String msg, Throwable cause) { - LogRecord record = new ESLogRecord(Level.FINE, msg); - record.setThrown(cause); - logger.log(record); - } - - @Override - protected void internalInfo(String msg) { - LogRecord record = new ESLogRecord(Level.INFO, msg); - logger.log(record); - } - - @Override - protected void internalInfo(String msg, Throwable cause) { - LogRecord record = new ESLogRecord(Level.INFO, msg); - record.setThrown(cause); - logger.log(record); - } - - @Override - protected void internalWarn(String msg) { - LogRecord record = new ESLogRecord(Level.WARNING, msg); - logger.log(record); - } - - @Override - protected void internalWarn(String msg, Throwable cause) { - LogRecord record = new ESLogRecord(Level.WARNING, msg); - record.setThrown(cause); - logger.log(record); - } - - @Override - protected void internalError(String msg) { - LogRecord record = new ESLogRecord(Level.SEVERE, msg); - logger.log(record); - } - - @Override - protected void internalError(String msg, Throwable cause) { - LogRecord record = new ESLogRecord(Level.SEVERE, msg); - record.setThrown(cause); - logger.log(record); - } - - protected Logger logger() { - return logger; - } -} diff --git a/core/src/main/java/org/elasticsearch/common/logging/log4j/Log4jESLogger.java b/core/src/main/java/org/elasticsearch/common/logging/log4j/Log4jESLogger.java deleted file mode 100644 index e74307f8a4d..00000000000 --- 
a/core/src/main/java/org/elasticsearch/common/logging/log4j/Log4jESLogger.java +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.logging.log4j; - -import org.apache.log4j.Level; -import org.apache.log4j.Logger; -import org.elasticsearch.common.logging.support.AbstractESLogger; - -/** - * - */ -public class Log4jESLogger extends AbstractESLogger { - - private final org.apache.log4j.Logger logger; - private final String FQCN = AbstractESLogger.class.getName(); - - public Log4jESLogger(String prefix, Logger logger) { - super(prefix); - this.logger = logger; - } - - public Logger logger() { - return logger; - } - - @Override - public void setLevel(String level) { - if (level == null) { - logger.setLevel(null); - } else if ("error".equalsIgnoreCase(level)) { - logger.setLevel(Level.ERROR); - } else if ("warn".equalsIgnoreCase(level)) { - logger.setLevel(Level.WARN); - } else if ("info".equalsIgnoreCase(level)) { - logger.setLevel(Level.INFO); - } else if ("debug".equalsIgnoreCase(level)) { - logger.setLevel(Level.DEBUG); - } else if ("trace".equalsIgnoreCase(level)) { - logger.setLevel(Level.TRACE); - } - } - - @Override - public String getLevel() { - if 
(logger.getLevel() == null) { - return null; - } - return logger.getLevel().toString(); - } - - @Override - public String getName() { - return logger.getName(); - } - - @Override - public boolean isTraceEnabled() { - return logger.isTraceEnabled(); - } - - @Override - public boolean isDebugEnabled() { - return logger.isDebugEnabled(); - } - - @Override - public boolean isInfoEnabled() { - return logger.isInfoEnabled(); - } - - @Override - public boolean isWarnEnabled() { - return logger.isEnabledFor(Level.WARN); - } - - @Override - public boolean isErrorEnabled() { - return logger.isEnabledFor(Level.ERROR); - } - - @Override - protected void internalTrace(String msg) { - logger.log(FQCN, Level.TRACE, msg, null); - } - - @Override - protected void internalTrace(String msg, Throwable cause) { - logger.log(FQCN, Level.TRACE, msg, cause); - } - - @Override - protected void internalDebug(String msg) { - logger.log(FQCN, Level.DEBUG, msg, null); - } - - @Override - protected void internalDebug(String msg, Throwable cause) { - logger.log(FQCN, Level.DEBUG, msg, cause); - } - - @Override - protected void internalInfo(String msg) { - logger.log(FQCN, Level.INFO, msg, null); - } - - @Override - protected void internalInfo(String msg, Throwable cause) { - logger.log(FQCN, Level.INFO, msg, cause); - } - - @Override - protected void internalWarn(String msg) { - logger.log(FQCN, Level.WARN, msg, null); - } - - @Override - protected void internalWarn(String msg, Throwable cause) { - logger.log(FQCN, Level.WARN, msg, cause); - } - - @Override - protected void internalError(String msg) { - logger.log(FQCN, Level.ERROR, msg, null); - } - - @Override - protected void internalError(String msg, Throwable cause) { - logger.log(FQCN, Level.ERROR, msg, cause); - } -} diff --git a/core/src/main/java/org/elasticsearch/common/logging/log4j/Log4jESLoggerFactory.java b/core/src/main/java/org/elasticsearch/common/logging/log4j/Log4jESLoggerFactory.java deleted file mode 100644 index 
b95e0987c90..00000000000 --- a/core/src/main/java/org/elasticsearch/common/logging/log4j/Log4jESLoggerFactory.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.logging.log4j; - -import org.apache.log4j.Logger; -import org.elasticsearch.common.logging.ESLogger; -import org.elasticsearch.common.logging.ESLoggerFactory; - -/** - * - */ -public class Log4jESLoggerFactory extends ESLoggerFactory { - - @Override - protected ESLogger rootLogger() { - return new Log4jESLogger(null, Logger.getRootLogger()); - } - - @Override - protected ESLogger newInstance(String prefix, String name) { - final org.apache.log4j.Logger logger = org.apache.log4j.Logger.getLogger(name); - return new Log4jESLogger(prefix, logger); - } -} diff --git a/core/src/main/java/org/elasticsearch/common/logging/slf4j/Slf4jESLogger.java b/core/src/main/java/org/elasticsearch/common/logging/slf4j/Slf4jESLogger.java deleted file mode 100644 index fc40ec00b01..00000000000 --- a/core/src/main/java/org/elasticsearch/common/logging/slf4j/Slf4jESLogger.java +++ /dev/null @@ -1,179 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.logging.slf4j; - -import org.elasticsearch.common.logging.support.AbstractESLogger; -import org.slf4j.Logger; -import org.slf4j.spi.LocationAwareLogger; - -/** - * - */ -public class Slf4jESLogger extends AbstractESLogger { - - private final Logger logger; - private final LocationAwareLogger lALogger; - private final String FQCN = AbstractESLogger.class.getName(); - - public Slf4jESLogger(String prefix, Logger logger) { - super(prefix); - this.logger = logger; - if (logger instanceof LocationAwareLogger) { - lALogger = (LocationAwareLogger) logger; - } else { - lALogger = null; - } - } - - @Override - public void setLevel(String level) { - // can't set it in slf4j... - } - - @Override - public String getLevel() { - // can't get it in slf4j... 
- return null; - } - - @Override - public String getName() { - return logger.getName(); - } - - @Override - public boolean isTraceEnabled() { - return logger.isTraceEnabled(); - } - - @Override - public boolean isDebugEnabled() { - return logger.isDebugEnabled(); - } - - @Override - public boolean isInfoEnabled() { - return logger.isInfoEnabled(); - } - - @Override - public boolean isWarnEnabled() { - return logger.isWarnEnabled(); - } - - @Override - public boolean isErrorEnabled() { - return logger.isErrorEnabled(); - } - - @Override - protected void internalTrace(String msg) { - if (lALogger != null) { - lALogger.log(null, FQCN, LocationAwareLogger.TRACE_INT, msg, null, null); - } else { - logger.trace(msg); - } - } - - @Override - protected void internalTrace(String msg, Throwable cause) { - if (lALogger != null) { - lALogger.log(null, FQCN, LocationAwareLogger.TRACE_INT, msg, null, cause); - } else { - logger.trace(msg); - } - } - - @Override - protected void internalDebug(String msg) { - if (lALogger != null) { - lALogger.log(null, FQCN, LocationAwareLogger.DEBUG_INT, msg, null, null); - } else { - logger.debug(msg); - } - } - - @Override - protected void internalDebug(String msg, Throwable cause) { - if (lALogger != null) { - lALogger.log(null, FQCN, LocationAwareLogger.DEBUG_INT, msg, null, cause); - } else { - logger.debug(msg); - } - } - - @Override - protected void internalInfo(String msg) { - if (lALogger != null) { - lALogger.log(null, FQCN, LocationAwareLogger.INFO_INT, msg, null, null); - } else { - logger.info(msg); - } - } - - @Override - protected void internalInfo(String msg, Throwable cause) { - if (lALogger != null) { - lALogger.log(null, FQCN, LocationAwareLogger.INFO_INT, msg, null, cause); - } else { - logger.info(msg, cause); - } - } - - @Override - protected void internalWarn(String msg) { - if (lALogger != null) { - lALogger.log(null, FQCN, LocationAwareLogger.WARN_INT, msg, null, null); - } else { - logger.warn(msg); - } - } - - 
@Override - protected void internalWarn(String msg, Throwable cause) { - if (lALogger != null) { - lALogger.log(null, FQCN, LocationAwareLogger.WARN_INT, msg, null, cause); - } else { - logger.warn(msg); - } - } - - @Override - protected void internalError(String msg) { - if (lALogger != null) { - lALogger.log(null, FQCN, LocationAwareLogger.ERROR_INT, msg, null, null); - } else { - logger.error(msg); - } - } - - @Override - protected void internalError(String msg, Throwable cause) { - if (lALogger != null) { - lALogger.log(null, FQCN, LocationAwareLogger.ERROR_INT, msg, null, cause); - } else { - logger.error(msg); - } - } - - protected Logger logger() { - return logger; - } -} diff --git a/core/src/main/java/org/elasticsearch/common/logging/slf4j/Slf4jESLoggerFactory.java b/core/src/main/java/org/elasticsearch/common/logging/slf4j/Slf4jESLoggerFactory.java deleted file mode 100644 index 1fa13863815..00000000000 --- a/core/src/main/java/org/elasticsearch/common/logging/slf4j/Slf4jESLoggerFactory.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.common.logging.slf4j; - -import org.elasticsearch.common.logging.ESLogger; -import org.elasticsearch.common.logging.ESLoggerFactory; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * - */ -public class Slf4jESLoggerFactory extends ESLoggerFactory { - - @Override - protected ESLogger rootLogger() { - return getLogger(Logger.ROOT_LOGGER_NAME); - } - - @Override - protected ESLogger newInstance(String prefix, String name) { - return new Slf4jESLogger(prefix, LoggerFactory.getLogger(name)); - } -} diff --git a/core/src/main/java/org/elasticsearch/common/logging/support/AbstractESLogger.java b/core/src/main/java/org/elasticsearch/common/logging/support/AbstractESLogger.java deleted file mode 100644 index 441e2418e6c..00000000000 --- a/core/src/main/java/org/elasticsearch/common/logging/support/AbstractESLogger.java +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.common.logging.support; - -import org.elasticsearch.common.logging.ESLogger; - -/** - * - */ -public abstract class AbstractESLogger implements ESLogger { - - private final String prefix; - - protected AbstractESLogger(String prefix) { - this.prefix = prefix; - } - - @Override - public String getPrefix() { - return this.prefix; - } - - @Override - public void trace(String msg, Object... params) { - if (isTraceEnabled()) { - internalTrace(LoggerMessageFormat.format(prefix, msg, params)); - } - } - - protected abstract void internalTrace(String msg); - - @Override - public void trace(String msg, Throwable cause, Object... params) { - if (isTraceEnabled()) { - internalTrace(LoggerMessageFormat.format(prefix, msg, params), cause); - } - } - - protected abstract void internalTrace(String msg, Throwable cause); - - - @Override - public void debug(String msg, Object... params) { - if (isDebugEnabled()) { - internalDebug(LoggerMessageFormat.format(prefix, msg, params)); - } - } - - protected abstract void internalDebug(String msg); - - @Override - public void debug(String msg, Throwable cause, Object... params) { - if (isDebugEnabled()) { - internalDebug(LoggerMessageFormat.format(prefix, msg, params), cause); - } - } - - protected abstract void internalDebug(String msg, Throwable cause); - - - @Override - public void info(String msg, Object... params) { - if (isInfoEnabled()) { - internalInfo(LoggerMessageFormat.format(prefix, msg, params)); - } - } - - protected abstract void internalInfo(String msg); - - @Override - public void info(String msg, Throwable cause, Object... params) { - if (isInfoEnabled()) { - internalInfo(LoggerMessageFormat.format(prefix, msg, params), cause); - } - } - - protected abstract void internalInfo(String msg, Throwable cause); - - - @Override - public void warn(String msg, Object... 
params) { - if (isWarnEnabled()) { - internalWarn(LoggerMessageFormat.format(prefix, msg, params)); - } - } - - protected abstract void internalWarn(String msg); - - @Override - public void warn(String msg, Throwable cause, Object... params) { - if (isWarnEnabled()) { - internalWarn(LoggerMessageFormat.format(prefix, msg, params), cause); - } - } - - protected abstract void internalWarn(String msg, Throwable cause); - - - @Override - public void error(String msg, Object... params) { - if (isErrorEnabled()) { - internalError(LoggerMessageFormat.format(prefix, msg, params)); - } - } - - protected abstract void internalError(String msg); - - @Override - public void error(String msg, Throwable cause, Object... params) { - if (isErrorEnabled()) { - internalError(LoggerMessageFormat.format(prefix, msg, params), cause); - } - } - - protected abstract void internalError(String msg, Throwable cause); -} diff --git a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java index b30178857e1..8f5373dfe3b 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java @@ -22,7 +22,6 @@ package org.elasticsearch.common.settings; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.regex.Regex; -import org.elasticsearch.common.util.set.Sets; import java.util.ArrayList; import java.util.Collections; @@ -63,6 +62,11 @@ public abstract class AbstractScopedSettings extends AbstractComponent { throw new IllegalArgumentException("illegal settings key: [" + setting.getKey() + "]"); } if (setting.hasComplexMatcher()) { + Setting overlappingSetting = findOverlappingSetting(setting, complexMatchers); + if (overlappingSetting != null) { + throw new IllegalArgumentException("complex 
setting key: [" + setting.getKey() + "] overlaps existing setting key: [" + + overlappingSetting.getKey() + "]"); + } complexMatchers.putIfAbsent(setting.getKey(), setting); } else { keySettings.putIfAbsent(setting.getKey(), setting); @@ -410,4 +414,19 @@ public abstract class AbstractScopedSettings extends AbstractComponent { return changed; } + private static Setting findOverlappingSetting(Setting newSetting, Map> complexMatchers) { + assert newSetting.hasComplexMatcher(); + if (complexMatchers.containsKey(newSetting.getKey())) { + // we return null here because we use a putIfAbsent call when inserting into the map, so if it exists then we already checked + // the setting to make sure there are no overlapping settings. + return null; + } + + for (Setting existingSetting : complexMatchers.values()) { + if (newSetting.match(existingSetting.getKey()) || existingSetting.match(newSetting.getKey())) { + return existingSetting; + } + } + return null; + } } diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/ConcurrentCollections.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/ConcurrentCollections.java index 74a1c13ce3d..1c78b92bebb 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/ConcurrentCollections.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/ConcurrentCollections.java @@ -36,8 +36,6 @@ import java.util.concurrent.LinkedTransferQueue; */ public abstract class ConcurrentCollections { - private final static boolean useLinkedTransferQueue = Boolean.parseBoolean(System.getProperty("es.useLinkedTransferQueue", "false")); - static final int aggressiveConcurrencyLevel; static { @@ -71,9 +69,6 @@ public abstract class ConcurrentCollections { } public static Queue newQueue() { - if (useLinkedTransferQueue) { - return new LinkedTransferQueue<>(); - } return new ConcurrentLinkedQueue<>(); } diff --git a/core/src/main/java/org/elasticsearch/discovery/Discovery.java 
b/core/src/main/java/org/elasticsearch/discovery/Discovery.java index c36fa17415c..fbb85e8e29f 100644 --- a/core/src/main/java/org/elasticsearch/discovery/Discovery.java +++ b/core/src/main/java/org/elasticsearch/discovery/Discovery.java @@ -94,6 +94,12 @@ public interface Discovery extends LifecycleComponent { DiscoveryStats stats(); + /** + * Triggers the first join cycle + */ + void startInitialJoin(); + + /*** * @return the current value of minimum master nodes, or -1 for not set */ diff --git a/core/src/main/java/org/elasticsearch/discovery/DiscoveryService.java b/core/src/main/java/org/elasticsearch/discovery/DiscoveryService.java index bef1c8fe5ec..ca4e9dd60bc 100644 --- a/core/src/main/java/org/elasticsearch/discovery/DiscoveryService.java +++ b/core/src/main/java/org/elasticsearch/discovery/DiscoveryService.java @@ -87,8 +87,9 @@ public class DiscoveryService extends AbstractLifecycleComponent implem @Override protected void doStart() { + + } + + @Override + public void startInitialJoin() { synchronized (clusterGroups) { ClusterGroup clusterGroup = clusterGroups.get(clusterName); if (clusterGroup == null) { diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java index ce083147117..05201c18672 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java @@ -216,7 +216,10 @@ public class ZenDiscovery extends AbstractLifecycleComponent implemen joinThreadControl.start(); pingService.start(); this.nodeJoinController = new NodeJoinController(clusterService, routingService, discoverySettings, settings); + } + @Override + public void startInitialJoin() { // start the join thread from a cluster state update. See {@link JoinThreadControl} for details. 
clusterService.submitStateUpdateTask("initial_join", new ClusterStateUpdateTask() { diff --git a/core/src/main/java/org/elasticsearch/gateway/GatewayService.java b/core/src/main/java/org/elasticsearch/gateway/GatewayService.java index 43b22d6c0bb..9354d593a3f 100644 --- a/core/src/main/java/org/elasticsearch/gateway/GatewayService.java +++ b/core/src/main/java/org/elasticsearch/gateway/GatewayService.java @@ -133,27 +133,6 @@ public class GatewayService extends AbstractLifecycleComponent i @Override protected void doStart() { clusterService.addLast(this); - // check we didn't miss any cluster state that came in until now / during the addition - clusterService.submitStateUpdateTask("gateway_initial_state_recovery", new ClusterStateUpdateTask() { - - @Override - public ClusterState execute(ClusterState currentState) throws Exception { - checkStateMeetsSettingsAndMaybeRecover(currentState); - return currentState; - } - - @Override - public boolean runOnlyOnMaster() { - // It's OK to run on non masters as checkStateMeetsSettingsAndMaybeRecover checks for this - // we return false to avoid unneeded failure logs - return false; - } - - @Override - public void onFailure(String source, Throwable t) { - logger.warn("unexpected failure while checking if state can be recovered. 
another attempt will be made with the next cluster state change", t); - } - }); } @Override @@ -170,10 +149,9 @@ public class GatewayService extends AbstractLifecycleComponent i if (lifecycle.stoppedOrClosed()) { return; } - checkStateMeetsSettingsAndMaybeRecover(event.state()); - } - protected void checkStateMeetsSettingsAndMaybeRecover(ClusterState state) { + final ClusterState state = event.state(); + if (state.nodes().localNodeMaster() == false) { // not our job to recover return; diff --git a/core/src/main/java/org/elasticsearch/index/IndexModule.java b/core/src/main/java/org/elasticsearch/index/IndexModule.java index f9eb3ec2b54..eabc0951e7f 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexModule.java +++ b/core/src/main/java/org/elasticsearch/index/IndexModule.java @@ -154,7 +154,7 @@ public final class IndexModule { */ public void addIndexStore(String type, BiFunction provider) { if (storeTypes.containsKey(type)) { - throw new IllegalArgumentException("key [" + type +"] already registerd"); + throw new IllegalArgumentException("key [" + type +"] already registered"); } storeTypes.put(type, provider); } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/CommonGramsTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/CommonGramsTokenFilterFactory.java index 1843fb58e56..0225ec64559 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/CommonGramsTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/CommonGramsTokenFilterFactory.java @@ -45,7 +45,7 @@ public class CommonGramsTokenFilterFactory extends AbstractTokenFilterFactory { this.words = Analysis.parseCommonWords(env, settings, null, ignoreCase); if (this.words == null) { - throw new IllegalArgumentException("mising or empty [common_words] or [common_words_path] configuration for common_grams token filter"); + throw new IllegalArgumentException("missing or empty [common_words] or [common_words_path] configuration for 
common_grams token filter"); } } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java index 3696462c4f5..1cd3abb0cb3 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java @@ -52,7 +52,7 @@ public class SynonymTokenFilterFactory extends AbstractTokenFilterFactory { List rules = Analysis.getWordList(env, settings, "synonyms"); StringBuilder sb = new StringBuilder(); for (String line : rules) { - sb.append(line).append(System.getProperty("line.separator")); + sb.append(line).append(System.lineSeparator()); } rulesReader = new FastStringReader(sb.toString()); } else if (settings.get("synonyms_path") != null) { diff --git a/core/src/main/java/org/elasticsearch/index/percolator/ExtractQueryTermsService.java b/core/src/main/java/org/elasticsearch/index/percolator/ExtractQueryTermsService.java index 2dfc9407fe6..8ee9d55c312 100644 --- a/core/src/main/java/org/elasticsearch/index/percolator/ExtractQueryTermsService.java +++ b/core/src/main/java/org/elasticsearch/index/percolator/ExtractQueryTermsService.java @@ -36,7 +36,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; -import org.elasticsearch.common.logging.support.LoggerMessageFormat; +import org.elasticsearch.common.logging.LoggerMessageFormat; import org.elasticsearch.index.mapper.ParseContext; import java.io.IOException; diff --git a/core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java index de5d4172630..033f668eada 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java +++ 
b/core/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java @@ -178,7 +178,7 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilder(); diff --git a/core/src/main/java/org/elasticsearch/index/translog/BaseTranslogReader.java b/core/src/main/java/org/elasticsearch/index/translog/BaseTranslogReader.java index c98ea69f87f..4cdbb28a68e 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/BaseTranslogReader.java +++ b/core/src/main/java/org/elasticsearch/index/translog/BaseTranslogReader.java @@ -38,7 +38,7 @@ public abstract class BaseTranslogReader implements Comparable(channel)); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestListTasksAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestListTasksAction.java index 4c9213945fa..992267fa8a5 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestListTasksAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/cluster/node/tasks/RestListTasksAction.java @@ -29,6 +29,7 @@ import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.support.RestToXContentListener; +import org.elasticsearch.tasks.TaskId; import static org.elasticsearch.rest.RestRequest.Method.GET; @@ -39,24 +40,22 @@ public class RestListTasksAction extends BaseRestHandler { public RestListTasksAction(Settings settings, RestController controller, Client client) { super(settings, client); controller.registerHandler(GET, "/_tasks", this); - controller.registerHandler(GET, "/_tasks/{nodeId}", this); - controller.registerHandler(GET, "/_tasks/{nodeId}/{taskId}", this); + controller.registerHandler(GET, "/_tasks/{taskId}", this); } @Override public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) { boolean detailed = 
request.paramAsBoolean("detailed", false); - String[] nodesIds = Strings.splitStringByCommaToArray(request.param("nodeId")); - long taskId = request.paramAsLong("taskId", ListTasksRequest.ALL_TASKS); + String[] nodesIds = Strings.splitStringByCommaToArray(request.param("node_id")); + TaskId taskId = new TaskId(request.param("taskId")); String[] actions = Strings.splitStringByCommaToArray(request.param("actions")); - String parentNode = request.param("parent_node"); - long parentTaskId = request.paramAsLong("parent_task", ListTasksRequest.ALL_TASKS); + TaskId parentTaskId = new TaskId(request.param("parent_task_id")); - ListTasksRequest listTasksRequest = new ListTasksRequest(nodesIds); + ListTasksRequest listTasksRequest = new ListTasksRequest(); listTasksRequest.taskId(taskId); + listTasksRequest.nodesIds(nodesIds); listTasksRequest.detailed(detailed); listTasksRequest.actions(actions); - listTasksRequest.parentNode(parentNode); listTasksRequest.parentTaskId(parentTaskId); client.admin().cluster().listTasks(listTasksRequest, new RestToXContentListener<>(channel)); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeAction.java index e440e1b95c7..4e90a6a3a85 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/analyze/RestAnalyzeAction.java @@ -102,7 +102,7 @@ public class RestAnalyzeAction extends BaseRestHandler { public static void buildFromContent(BytesReference content, AnalyzeRequest analyzeRequest, ParseFieldMatcher parseFieldMatcher) { try (XContentParser parser = XContentHelper.createParser(content)) { if (parser.nextToken() != XContentParser.Token.START_OBJECT) { - throw new IllegalArgumentException("Malforrmed content, must start with an object"); + throw new IllegalArgumentException("Malformed 
content, must start with an object"); } else { XContentParser.Token token; String currentFieldName = null; diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchScrollAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchScrollAction.java index 9e9964245e4..3a10db38ee1 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchScrollAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchScrollAction.java @@ -87,7 +87,7 @@ public class RestSearchScrollAction extends BaseRestHandler { public static void buildFromContent(BytesReference content, SearchScrollRequest searchScrollRequest) { try (XContentParser parser = XContentHelper.createParser(content)) { if (parser.nextToken() != XContentParser.Token.START_OBJECT) { - throw new IllegalArgumentException("Malforrmed content, must start with an object"); + throw new IllegalArgumentException("Malformed content, must start with an object"); } else { XContentParser.Token token; String currentFieldName = null; diff --git a/core/src/main/java/org/elasticsearch/script/Script.java b/core/src/main/java/org/elasticsearch/script/Script.java index dc0e3835c34..65a117d014f 100644 --- a/core/src/main/java/org/elasticsearch/script/Script.java +++ b/core/src/main/java/org/elasticsearch/script/Script.java @@ -26,7 +26,7 @@ import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; -import org.elasticsearch.common.logging.support.LoggerMessageFormat; +import org.elasticsearch.common.logging.LoggerMessageFormat; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; diff --git a/core/src/main/java/org/elasticsearch/search/SearchModule.java 
b/core/src/main/java/org/elasticsearch/search/SearchModule.java index ce7ef3e5b37..8766bc7d1d3 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/core/src/main/java/org/elasticsearch/search/SearchModule.java @@ -33,6 +33,7 @@ import org.elasticsearch.common.geo.builders.PolygonBuilder; import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.inject.multibindings.Multibinder; +import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.BoolQueryParser; @@ -287,14 +288,15 @@ public class SearchModule extends AbstractModule { /** * Register a new ScoreFunctionParser. */ - public void registerFunctionScoreParser(ScoreFunctionParser parser) { + public void registerFunctionScoreParser(ScoreFunctionParser parser) { for (String name: parser.getNames()) { Object oldValue = functionScoreParsers.putIfAbsent(name, parser); if (oldValue != null) { throw new IllegalArgumentException("Function score parser [" + oldValue + "] already registered for name [" + name + "]"); } } - namedWriteableRegistry.registerPrototype(ScoreFunctionBuilder.class, parser.getBuilderPrototype()); + @SuppressWarnings("unchecked") NamedWriteable sfb = parser.getBuilderPrototype(); + namedWriteableRegistry.registerPrototype(ScoreFunctionBuilder.class, sfb); } public void registerQueryParser(Supplier> parser) { @@ -358,14 +360,15 @@ public class SearchModule extends AbstractModule { public IndicesQueriesRegistry buildQueryParserRegistry() { Map> queryParsersMap = new HashMap<>(); for (Supplier> parserSupplier : queryParsers) { - QueryParser parser = parserSupplier.get(); + QueryParser parser = parserSupplier.get(); for (String name: parser.names()) { Object oldValue = queryParsersMap.putIfAbsent(name, parser); if (oldValue != null) { throw 
new IllegalArgumentException("Query parser [" + oldValue + "] already registered for name [" + name + "] while trying to register [" + parser + "]"); } } - namedWriteableRegistry.registerPrototype(QueryBuilder.class, parser.getBuilderPrototype()); + @SuppressWarnings("unchecked") NamedWriteable qb = parser.getBuilderPrototype(); + namedWriteableRegistry.registerPrototype(QueryBuilder.class, qb); } return new IndicesQueriesRegistry(settings, queryParsersMap); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java b/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java index 14a053d875b..beab9ed53b4 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/AggregatorFactories.java @@ -199,7 +199,7 @@ public class AggregatorFactories { List> orderedPipelineAggregators, List> unmarkedBuilders, Set> temporarilyMarked, PipelineAggregatorBuilder builder) { if (temporarilyMarked.contains(builder)) { - throw new IllegalArgumentException("Cyclical dependancy found with pipeline aggregator [" + builder.getName() + "]"); + throw new IllegalArgumentException("Cyclical dependency found with pipeline aggregator [" + builder.getName() + "]"); } else if (unmarkedBuilders.contains(builder)) { temporarilyMarked.add(builder); String[] bucketsPaths = builder.getBucketsPaths(); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java index 5ed1f953e5f..ccaa3738651 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTermsAggregatorFactory.java @@ -228,7 +228,7 @@ public 
class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac metaData); } - throw new AggregationExecutionException("sigfnificant_terms aggregation cannot be applied to field [" + throw new AggregationExecutionException("significant_terms aggregation cannot be applied to field [" + config.fieldContext().field() + "]. It can only be applied to numeric or string fields."); } diff --git a/core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java b/core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java index d79b1f59a6a..ad8e71f5b93 100644 --- a/core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java +++ b/core/src/main/java/org/elasticsearch/search/controller/SearchPhaseController.java @@ -83,10 +83,8 @@ public class SearchPhaseController extends AbstractComponent { }; public static final ScoreDoc[] EMPTY_DOCS = new ScoreDoc[0]; - public static final String SEARCH_CONTROLLER_OPTIMIZE_SINGLE_SHARD_KEY = "search.controller.optimize_single_shard"; private final BigArrays bigArrays; - private final boolean optimizeSingleShard; private ScriptService scriptService; @@ -95,11 +93,6 @@ public class SearchPhaseController extends AbstractComponent { super(settings); this.bigArrays = bigArrays; this.scriptService = scriptService; - this.optimizeSingleShard = settings.getAsBoolean(SEARCH_CONTROLLER_OPTIMIZE_SINGLE_SHARD_KEY, true); - } - - public boolean optimizeSingleShard() { - return optimizeSingleShard; } public AggregatedDfs aggregateDfs(AtomicArray results) { @@ -168,50 +161,48 @@ public class SearchPhaseController extends AbstractComponent { return EMPTY_DOCS; } - if (optimizeSingleShard) { - boolean canOptimize = false; - QuerySearchResult result = null; - int shardIndex = -1; - if (results.size() == 1) { - canOptimize = true; - result = results.get(0).value.queryResult(); - shardIndex = results.get(0).index; - } else { - // lets see if we only got hits from a single shard, if so, we 
can optimize... - for (AtomicArray.Entry entry : results) { - if (entry.value.queryResult().topDocs().scoreDocs.length > 0) { - if (result != null) { // we already have one, can't really optimize - canOptimize = false; - break; - } - canOptimize = true; - result = entry.value.queryResult(); - shardIndex = entry.index; + boolean canOptimize = false; + QuerySearchResult result = null; + int shardIndex = -1; + if (results.size() == 1) { + canOptimize = true; + result = results.get(0).value.queryResult(); + shardIndex = results.get(0).index; + } else { + // lets see if we only got hits from a single shard, if so, we can optimize... + for (AtomicArray.Entry entry : results) { + if (entry.value.queryResult().topDocs().scoreDocs.length > 0) { + if (result != null) { // we already have one, can't really optimize + canOptimize = false; + break; } + canOptimize = true; + result = entry.value.queryResult(); + shardIndex = entry.index; } } - if (canOptimize) { - int offset = result.from(); - if (ignoreFrom) { - offset = 0; - } - ScoreDoc[] scoreDocs = result.topDocs().scoreDocs; - if (scoreDocs.length == 0 || scoreDocs.length < offset) { - return EMPTY_DOCS; - } + } + if (canOptimize) { + int offset = result.from(); + if (ignoreFrom) { + offset = 0; + } + ScoreDoc[] scoreDocs = result.topDocs().scoreDocs; + if (scoreDocs.length == 0 || scoreDocs.length < offset) { + return EMPTY_DOCS; + } - int resultDocsSize = result.size(); - if ((scoreDocs.length - offset) < resultDocsSize) { - resultDocsSize = scoreDocs.length - offset; - } - ScoreDoc[] docs = new ScoreDoc[resultDocsSize]; - for (int i = 0; i < resultDocsSize; i++) { - ScoreDoc scoreDoc = scoreDocs[offset + i]; - scoreDoc.shardIndex = shardIndex; - docs[i] = scoreDoc; - } - return docs; + int resultDocsSize = result.size(); + if ((scoreDocs.length - offset) < resultDocsSize) { + resultDocsSize = scoreDocs.length - offset; } + ScoreDoc[] docs = new ScoreDoc[resultDocsSize]; + for (int i = 0; i < resultDocsSize; i++) { + 
ScoreDoc scoreDoc = scoreDocs[offset + i]; + scoreDoc.shardIndex = shardIndex; + docs[i] = scoreDoc; + } + return docs; } @SuppressWarnings("unchecked") diff --git a/core/src/main/java/org/elasticsearch/tasks/CancellableTask.java b/core/src/main/java/org/elasticsearch/tasks/CancellableTask.java index 3297977cb3a..8916a8be7cb 100644 --- a/core/src/main/java/org/elasticsearch/tasks/CancellableTask.java +++ b/core/src/main/java/org/elasticsearch/tasks/CancellableTask.java @@ -32,8 +32,8 @@ public class CancellableTask extends Task { super(id, type, action, description); } - public CancellableTask(long id, String type, String action, String description, String parentNode, long parentId) { - super(id, type, action, description, parentNode, parentId); + public CancellableTask(long id, String type, String action, String description, TaskId parentTaskId) { + super(id, type, action, description, parentTaskId); } /** diff --git a/core/src/main/java/org/elasticsearch/tasks/Task.java b/core/src/main/java/org/elasticsearch/tasks/Task.java index 621166c9ccc..5aa034b7997 100644 --- a/core/src/main/java/org/elasticsearch/tasks/Task.java +++ b/core/src/main/java/org/elasticsearch/tasks/Task.java @@ -30,8 +30,6 @@ import org.elasticsearch.common.xcontent.ToXContent; */ public class Task { - public static final long NO_PARENT_ID = 0; - private final long id; private final String type; @@ -40,22 +38,18 @@ public class Task { private final String description; - private final String parentNode; - - private final long parentId; - + private final TaskId parentTask; public Task(long id, String type, String action, String description) { - this(id, type, action, description, null, NO_PARENT_ID); + this(id, type, action, description, TaskId.EMPTY_TASK_ID); } - public Task(long id, String type, String action, String description, String parentNode, long parentId) { + public Task(long id, String type, String action, String description, TaskId parentTask) { this.id = id; this.type = type; 
this.action = action; this.description = description; - this.parentNode = parentNode; - this.parentId = parentId; + this.parentTask = parentTask; } /** @@ -75,7 +69,7 @@ public class Task { description = getDescription(); status = getStatus(); } - return new TaskInfo(node, getId(), getType(), getAction(), description, status, parentNode, parentId); + return new TaskInfo(node, getId(), getType(), getAction(), description, status, parentTask); } /** @@ -106,18 +100,11 @@ public class Task { return description; } - /** - * Returns the parent node of the task or null if the task doesn't have any parent tasks - */ - public String getParentNode() { - return parentNode; - } - /** * Returns id of the parent task or NO_PARENT_ID if the task doesn't have any parent tasks */ - public long getParentId() { - return parentId; + public TaskId getParentTaskId() { + return parentTask; } /** diff --git a/core/src/main/java/org/elasticsearch/tasks/TaskId.java b/core/src/main/java/org/elasticsearch/tasks/TaskId.java new file mode 100644 index 00000000000..5c5ad36cc17 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/tasks/TaskId.java @@ -0,0 +1,118 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.tasks; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; + +import java.io.IOException; + +/** + * Task id that consists of node id and id of the task on the node + */ +public final class TaskId implements Writeable { + + public final static TaskId EMPTY_TASK_ID = new TaskId("", -1L); + + private final String nodeId; + private final long id; + + public TaskId(String nodeId, long id) { + this.nodeId = nodeId; + this.id = id; + } + + public TaskId(String taskId) { + if (Strings.hasLength(taskId) && "unset".equals(taskId) == false) { + String[] s = Strings.split(taskId, ":"); + if (s == null || s.length != 2) { + throw new IllegalArgumentException("malformed task id " + taskId); + } + this.nodeId = s[0]; + try { + this.id = Long.parseLong(s[1]); + } catch (NumberFormatException ex) { + throw new IllegalArgumentException("malformed task id " + taskId, ex); + } + } else { + nodeId = ""; + id = -1L; + } + } + + public TaskId(StreamInput in) throws IOException { + nodeId = in.readString(); + id = in.readLong(); + } + + public String getNodeId() { + return nodeId; + } + + public long getId() { + return id; + } + + public boolean isSet() { + return id == -1L; + } + + @Override + public String toString() { + if (isSet()) { + return "unset"; + } else { + return nodeId + ":" + id; + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(nodeId); + out.writeLong(id); + } + + @Override + public TaskId readFrom(StreamInput in) throws IOException { + return new TaskId(in); + } + + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + TaskId taskId = (TaskId) o; + + if (id != taskId.id) return false; + return nodeId.equals(taskId.nodeId); + + } + + @Override + 
public int hashCode() { + int result = nodeId.hashCode(); + result = 31 * result + (int) (id ^ (id >>> 32)); + return result; + } +} diff --git a/core/src/main/java/org/elasticsearch/tasks/TaskManager.java b/core/src/main/java/org/elasticsearch/tasks/TaskManager.java index f30330ec28f..0c785573c99 100644 --- a/core/src/main/java/org/elasticsearch/tasks/TaskManager.java +++ b/core/src/main/java/org/elasticsearch/tasks/TaskManager.java @@ -22,7 +22,6 @@ package org.elasticsearch.tasks; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; @@ -51,7 +50,7 @@ public class TaskManager extends AbstractComponent implements ClusterStateListen private final AtomicLong taskIdGenerator = new AtomicLong(); - private final Map, String> banedParents = new ConcurrentHashMap<>(); + private final Map banedParents = new ConcurrentHashMap<>(); public TaskManager(Settings settings) { super(settings); @@ -77,8 +76,8 @@ public class TaskManager extends AbstractComponent implements ClusterStateListen CancellableTaskHolder oldHolder = cancellableTasks.put(task.getId(), holder); assert oldHolder == null; // Check if this task was banned before we start it - if (task.getParentNode() != null && banedParents.isEmpty() == false) { - String reason = banedParents.get(new Tuple<>(task.getParentNode(), task.getParentId())); + if (task.getParentTaskId().isSet() == false && banedParents.isEmpty() == false) { + String reason = banedParents.get(task.getParentTaskId()); if (reason != null) { try { holder.cancel(reason); @@ -191,22 +190,21 @@ public class TaskManager extends AbstractComponent implements ClusterStateListen *

* This method is called when a parent task that has children is cancelled. */ - public void setBan(String parentNode, long parentId, String reason) { - logger.trace("setting ban for the parent task {}:{} {}", parentNode, parentId, reason); + public void setBan(TaskId parentTaskId, String reason) { + logger.trace("setting ban for the parent task {} {}", parentTaskId, reason); // Set the ban first, so the newly created tasks cannot be registered - Tuple ban = new Tuple<>(parentNode, parentId); synchronized (banedParents) { - if (lastDiscoveryNodes.nodeExists(parentNode)) { + if (lastDiscoveryNodes.nodeExists(parentTaskId.getNodeId())) { // Only set the ban if the node is the part of the cluster - banedParents.put(ban, reason); + banedParents.put(parentTaskId, reason); } } // Now go through already running tasks and cancel them for (Map.Entry taskEntry : cancellableTasks.entrySet()) { CancellableTaskHolder holder = taskEntry.getValue(); - if (holder.hasParent(parentNode, parentId)) { + if (holder.hasParent(parentTaskId)) { holder.cancel(reason); } } @@ -217,9 +215,9 @@ public class TaskManager extends AbstractComponent implements ClusterStateListen *

* This method is called when a previously banned task finally cancelled */ - public void removeBan(String parentNode, long parentId) { - logger.trace("removing ban for the parent task {}:{} {}", parentNode, parentId); - banedParents.remove(new Tuple<>(parentNode, parentId)); + public void removeBan(TaskId parentTaskId) { + logger.trace("removing ban for the parent task {}", parentTaskId); + banedParents.remove(parentTaskId); } @Override @@ -228,14 +226,12 @@ public class TaskManager extends AbstractComponent implements ClusterStateListen synchronized (banedParents) { lastDiscoveryNodes = event.state().getNodes(); // Remove all bans that were registered by nodes that are no longer in the cluster state - Iterator> banIterator = banedParents.keySet().iterator(); + Iterator banIterator = banedParents.keySet().iterator(); while (banIterator.hasNext()) { - Tuple nodeAndTaskId = banIterator.next(); - String nodeId = nodeAndTaskId.v1(); - Long taskId = nodeAndTaskId.v2(); - if (lastDiscoveryNodes.nodeExists(nodeId) == false) { - logger.debug("Removing ban for the parent [{}:{}] on the node [{}], reason: the parent node is gone", nodeId, - taskId, event.state().getNodes().localNode()); + TaskId taskId = banIterator.next(); + if (lastDiscoveryNodes.nodeExists(taskId.getNodeId()) == false) { + logger.debug("Removing ban for the parent [{}] on the node [{}], reason: the parent node is gone", taskId, + event.state().getNodes().localNode()); banIterator.remove(); } } @@ -244,10 +240,10 @@ public class TaskManager extends AbstractComponent implements ClusterStateListen for (Map.Entry taskEntry : cancellableTasks.entrySet()) { CancellableTaskHolder holder = taskEntry.getValue(); CancellableTask task = holder.getTask(); - String parent = task.getParentNode(); - if (parent != null && lastDiscoveryNodes.nodeExists(parent) == false) { + TaskId parentTaskId = task.getParentTaskId(); + if (parentTaskId.isSet() == false && lastDiscoveryNodes.nodeExists(parentTaskId.getNodeId()) == false) 
{ if (task.cancelOnParentLeaving()) { - holder.cancel("Coordinating node [" + parent + "] left the cluster"); + holder.cancel("Coordinating node [" + parentTaskId.getNodeId() + "] left the cluster"); } } } @@ -340,8 +336,8 @@ public class TaskManager extends AbstractComponent implements ClusterStateListen } - public boolean hasParent(String parentNode, long parentId) { - return parentId == task.getParentId() && parentNode.equals(task.getParentNode()); + public boolean hasParent(TaskId parentTaskId) { + return task.getParentTaskId().equals(parentTaskId); } public CancellableTask getTask() { diff --git a/core/src/main/java/org/elasticsearch/transport/TransportService.java b/core/src/main/java/org/elasticsearch/transport/TransportService.java index d04966bc2ca..cb2c05eedd7 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportService.java @@ -54,8 +54,8 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.CountDownLatch; import java.util.concurrent.ScheduledFuture; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; import java.util.function.Function; import java.util.function.Supplier; @@ -71,7 +71,7 @@ public class TransportService extends AbstractLifecycleComponent tracelLogExclude) { this.tracelLogExclude = tracelLogExclude.toArray(Strings.EMPTY_ARRAY); } + @Override protected void doStart() { adapter.rxMetric.clear(); @@ -179,14 +180,10 @@ public class TransportService extends AbstractLifecycleComponent address) { return transport.addressSupported(address); } @@ -302,7 +308,7 @@ public class TransportService extends AbstractLifecycleComponent(new ContextRestoreResponseHandler(threadPool.getThreadContext().newStoredContext(), handler), node, action, timeoutHandler)); - if (started.get() == false) { + if 
(lifecycle.stoppedOrClosed()) { // if we are not started the exception handling will remove the RequestHolder again and calls the handler to notify the caller. // it will only notify if the toStop code hasn't done the work yet. throw new TransportException("TransportService is closed stopped can't send request"); @@ -405,10 +411,11 @@ public class TransportService extends AbstractLifecycleComponent void registerRequestHandler(String action, Supplier requestFactory, String executor, TransportRequestHandler handler) { RequestHandlerRegistry reg = new RequestHandlerRegistry<>(action, requestFactory, taskManager, handler, executor, false); @@ -417,11 +424,12 @@ public class TransportService extends AbstractLifecycleComponent void registerRequestHandler(String action, Supplier request, String executor, boolean forceExecution, TransportRequestHandler handler) { RequestHandlerRegistry reg = new RequestHandlerRegistry<>(action, request, taskManager, handler, executor, forceExecution); @@ -494,6 +502,11 @@ public class TransportService extends AbstractLifecycleComponent implements TransportResponseHandler { private final TransportResponseHandler delegate; private final ThreadContext.StoredContext threadContext; + private ContextRestoreResponseHandler(ThreadContext.StoredContext threadContext, TransportResponseHandler delegate) { this.delegate = delegate; this.threadContext = threadContext; @@ -766,7 +780,7 @@ public class TransportService extends AbstractLifecycleComponent Simulate issuing cancel request on the node that is about to leave the cluster"); // Simulate issuing cancel request on the node that is about to leave the cluster - CancelTasksRequest request = new CancelTasksRequest(testNodes[0].discoveryNode.getId()); + CancelTasksRequest request = new CancelTasksRequest(); request.reason("Testing Cancellation"); - request.taskId(mainTask.getId()); + request.taskId(new TaskId(testNodes[0].discoveryNode.getId(), mainTask.getId())); // And send the cancellation request 
to a random node CancelTasksResponse response = testNodes[0].transportCancelTasksAction.execute(request).get(); logger.info("--> Done simulating issuing cancel request on the node that is about to leave the cluster"); @@ -354,7 +356,7 @@ public class CancellableTasksTests extends TaskManagerTestCase { // Make sure that tasks are no longer running try { ListTasksResponse listTasksResponse1 = testNodes[randomIntBetween(1, testNodes.length - 1)] - .transportListTasksAction.execute(new ListTasksRequest().parentNode(mainNode).taskId(mainTask.getId())).get(); + .transportListTasksAction.execute(new ListTasksRequest().taskId(new TaskId(mainNode, mainTask.getId()))).get(); assertEquals(0, listTasksResponse1.getTasks().size()); } catch (InterruptedException ex) { Thread.currentThread().interrupt(); diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java index 60844a9b79a..4dcf54b5d0b 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java @@ -203,6 +203,7 @@ public abstract class TaskManagerTestCase extends ESTestCase { actionFilters, indexNameExpressionResolver); transportCancelTasksAction = new TransportCancelTasksAction(settings, clusterName, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver); + transportService.acceptIncomingRequests(); } public final TestClusterService clusterService; diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java index d35704a9353..eaa3caf9084 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java +++ 
b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java @@ -110,7 +110,7 @@ public class TasksIT extends ESIntegTestCase { List tasks = findEvents(ClusterHealthAction.NAME, Tuple::v1); // Verify that one of these tasks is a parent of another task - if (tasks.get(0).getParentNode() == null) { + if (tasks.get(0).getParentTaskId().isSet()) { assertParentTask(Collections.singletonList(tasks.get(1)), tasks.get(0)); } else { assertParentTask(Collections.singletonList(tasks.get(0)), tasks.get(1)); @@ -227,7 +227,9 @@ public class TasksIT extends ESIntegTestCase { } else { // A [s][r] level task should have a corresponding [s] level task on the a different node (where primary is located) sTask = findEvents(RefreshAction.NAME + "[s]", - event -> event.v1() && taskInfo.getParentNode().equals(event.v2().getNode().getId()) && taskInfo.getDescription().equals(event.v2().getDescription())); + event -> event.v1() && taskInfo.getParentTaskId().getNodeId().equals(event.v2().getNode().getId()) && taskInfo + .getDescription() + .equals(event.v2().getDescription())); } // There should be only one parent task assertEquals(1, sTask.size()); @@ -393,9 +395,10 @@ public class TasksIT extends ESIntegTestCase { */ private void assertParentTask(List tasks, TaskInfo parentTask) { for (TaskInfo task : tasks) { - assertNotNull(task.getParentNode()); - assertEquals(parentTask.getNode().getId(), task.getParentNode()); - assertEquals(parentTask.getId(), task.getParentId()); + assertFalse(task.getParentTaskId().isSet()); + assertEquals(parentTask.getNode().getId(), task.getParentTaskId().getNodeId()); + assertTrue(Strings.hasLength(task.getParentTaskId().getNodeId())); + assertEquals(parentTask.getId(), task.getParentTaskId().getId()); } } } diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java index 5b3736de793..0d4372a51eb 100644 
--- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java @@ -47,6 +47,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -84,8 +85,8 @@ public class TestTaskPlugin extends Plugin { private volatile boolean blocked = true; - public TestTask(long id, String type, String action, String description, String parentNode, long parentId) { - super(id, type, action, description, parentNode, parentId); + public TestTask(long id, String type, String action, String description, TaskId parentTaskId) { + super(id, type, action, description, parentTaskId); } public boolean isBlocked() { @@ -172,8 +173,8 @@ public class TestTaskPlugin extends Plugin { } @Override - public Task createTask(long id, String type, String action, String parentTaskNode, long parentTaskId) { - return new TestTask(id, type, action, this.getDescription(), parentTaskNode, parentTaskId); + public Task createTask(long id, String type, String action, TaskId parentTaskId) { + return new TestTask(id, type, action, this.getDescription(), parentTaskId); } } diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java index 19fd017c3cc..b4464dc9f58 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java @@ -43,13 +43,11 @@ import org.elasticsearch.common.io.stream.StreamOutput; import 
org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.tasks.MockTaskManager; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.transport.local.LocalTransport; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.BeforeClass; + import java.io.IOException; import java.util.ArrayList; @@ -103,9 +101,9 @@ public class TransportTasksActionTests extends TaskManagerTestCase { } @Override - public Task createTask(long id, String type, String action, String parentTaskNode, long parentTaskId) { + public Task createTask(long id, String type, String action, TaskId parentTaskId) { if (enableTaskManager) { - return super.createTask(id, type, action, parentTaskNode, parentTaskId); + return super.createTask(id, type, action, parentTaskId); } else { return null; } @@ -313,7 +311,7 @@ public class TransportTasksActionTests extends TaskManagerTestCase { } Task task = actions[0].execute(request, listener); logger.info("Awaiting for all actions to start"); - actionLatch.await(); + assertTrue(actionLatch.await(10, TimeUnit.SECONDS)); logger.info("Done waiting for all actions to start"); return task; } @@ -426,14 +424,13 @@ public class TransportTasksActionTests extends TaskManagerTestCase { // Find tasks with common parent listTasksRequest = new ListTasksRequest(); - listTasksRequest.parentNode(parentNode); - listTasksRequest.parentTaskId(parentTaskId); + listTasksRequest.parentTaskId(new TaskId(parentNode, parentTaskId)); response = testNode.transportListTasksAction.execute(listTasksRequest).get(); assertEquals(testNodes.length, response.getTasks().size()); for (TaskInfo task : response.getTasks()) { assertEquals("testAction[n]", task.getAction()); - assertEquals(parentNode, task.getParentNode()); - assertEquals(parentTaskId, 
task.getParentId()); + assertEquals(parentNode, task.getParentTaskId().getNodeId()); + assertEquals(parentTaskId, task.getParentTaskId().getId()); } // Release all tasks and wait for response @@ -514,7 +511,8 @@ public class TransportTasksActionTests extends TaskManagerTestCase { String actionName = "testAction"; // only pick the main action // Try to cancel main task using action name - CancelTasksRequest request = new CancelTasksRequest(testNodes[0].discoveryNode.getId()); + CancelTasksRequest request = new CancelTasksRequest(); + request.nodesIds(testNodes[0].discoveryNode.getId()); request.reason("Testing Cancellation"); request.actions(actionName); CancelTasksResponse response = testNodes[randomIntBetween(0, testNodes.length - 1)].transportCancelTasksAction.execute(request) @@ -527,9 +525,9 @@ public class TransportTasksActionTests extends TaskManagerTestCase { // Try to cancel main task using id - request = new CancelTasksRequest(testNodes[0].discoveryNode.getId()); + request = new CancelTasksRequest(); request.reason("Testing Cancellation"); - request.taskId(task.getId()); + request.taskId(new TaskId(testNodes[0].discoveryNode.getId(), task.getId())); response = testNodes[randomIntBetween(0, testNodes.length - 1)].transportCancelTasksAction.execute(request).get(); // Shouldn't match any tasks since testAction doesn't support cancellation @@ -601,7 +599,7 @@ public class TransportTasksActionTests extends TaskManagerTestCase { @Override protected TestTaskResponse taskOperation(TestTasksRequest request, Task task) { logger.info("Task action on node " + node); - if (failTaskOnNode == node && task.getParentNode() != null) { + if (failTaskOnNode == node && task.getParentTaskId().isSet() == false) { logger.info("Failing on node " + node); throw new RuntimeException("Task level failure"); } diff --git a/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java b/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java index 
66fb8aa3f21..30552201dfd 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java @@ -150,9 +150,9 @@ public class BulkRequestTests extends ESTestCase { BulkRequest bulkRequest = new BulkRequest(); try { bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null); - fail("should have thrown an exception about the unknown paramater _foo"); + fail("should have thrown an exception about the unknown parameter _foo"); } catch (IllegalArgumentException e) { - assertThat("message contains error about the unknown paramater _foo: " + e.getMessage(), + assertThat("message contains error about the unknown parameter _foo: " + e.getMessage(), e.getMessage().contains("Action/metadata line [3] contains an unknown parameter [_foo]"), equalTo(true)); } } diff --git a/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java b/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java index 2d6833db7d9..2615e5a0b22 100644 --- a/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java @@ -185,6 +185,7 @@ public class TransportBroadcastByNodeActionTests extends ESTestCase { clusterService = new TestClusterService(THREAD_POOL); final TransportService transportService = new TransportService(transport, THREAD_POOL); transportService.start(); + transportService.acceptIncomingRequests(); setClusterState(clusterService, TEST_INDEX); action = new TestTransportBroadcastByNodeAction( Settings.EMPTY, diff --git a/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java 
b/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java index c9a7d9bd2d2..860f95ace55 100644 --- a/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/master/TransportMasterNodeActionTests.java @@ -84,6 +84,7 @@ public class TransportMasterNodeActionTests extends ESTestCase { clusterService = new TestClusterService(threadPool); transportService = new TransportService(transport, threadPool); transportService.start(); + transportService.acceptIncomingRequests(); localNode = new DiscoveryNode("local_node", DummyTransportAddress.INSTANCE, Version.CURRENT); remoteNode = new DiscoveryNode("remote_node", DummyTransportAddress.INSTANCE, Version.CURRENT); allNodes = new DiscoveryNode[] { localNode, remoteNode }; diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java b/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java index 7b9fd91a567..03869974444 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java @@ -88,6 +88,7 @@ public class BroadcastReplicationTests extends ESTestCase { clusterService = new TestClusterService(threadPool); transportService = new TransportService(transport, threadPool); transportService.start(); + transportService.acceptIncomingRequests(); broadcastReplicationAction = new TestBroadcastReplicationAction(Settings.EMPTY, threadPool, clusterService, transportService, new ActionFilters(new HashSet()), new IndexNameExpressionResolver(Settings.EMPTY), null); } diff --git a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java 
b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java index 7cf0a4f3b50..3fc33477746 100644 --- a/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/replication/TransportReplicationActionTests.java @@ -18,8 +18,6 @@ */ package org.elasticsearch.action.support.replication; -import com.carrotsearch.randomizedtesting.annotations.Repeat; - import org.apache.lucene.index.CorruptIndexException; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ReplicationResponse; @@ -126,6 +124,7 @@ public class TransportReplicationActionTests extends ESTestCase { clusterService = new TestClusterService(threadPool); transportService = new TransportService(transport, threadPool); transportService.start(); + transportService.acceptIncomingRequests(); action = new Action(Settings.EMPTY, "testAction", transportService, clusterService, threadPool); count.set(1); } @@ -1016,7 +1015,7 @@ public class TransportReplicationActionTests extends ESTestCase { * half the time. */ private ReplicationTask maybeTask() { - return random().nextBoolean() ? new ReplicationTask(0, null, null, null, null, 0) : null; + return random().nextBoolean() ? 
new ReplicationTask(0, null, null, null, null) : null; } /** diff --git a/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java b/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java index 344846c363e..2bfc12c0883 100644 --- a/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationActionTests.java @@ -141,6 +141,7 @@ public class TransportInstanceSingleOperationActionTests extends ESTestCase { clusterService = new TestClusterService(THREAD_POOL); transportService = new TransportService(transport, THREAD_POOL); transportService.start(); + transportService.acceptIncomingRequests(); action = new TestTransportInstanceSingleOperationAction( Settings.EMPTY, "indices:admin/test", diff --git a/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java b/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java index 79f09727b95..ca83deeef1b 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java +++ b/core/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java @@ -73,6 +73,7 @@ public class TransportClientNodesServiceTests extends ESTestCase { }; transportService = new TransportService(Settings.EMPTY, transport, threadPool, new NamedWriteableRegistry()); transportService.start(); + transportService.acceptIncomingRequests(); transportClientNodesService = new TransportClientNodesService(Settings.EMPTY, ClusterName.DEFAULT, transportService, threadPool, Version.CURRENT); nodesCount = randomIntBetween(1, 10); diff --git a/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java 
b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java index 59692b5febb..c4031edc2d6 100644 --- a/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java @@ -107,6 +107,7 @@ public class ShardStateActionTests extends ESTestCase { clusterService = new TestClusterService(THREAD_POOL); transportService = new TransportService(transport, THREAD_POOL); transportService.start(); + transportService.acceptIncomingRequests(); shardStateAction = new TestShardStateAction(Settings.EMPTY, clusterService, transportService, null, null); shardStateAction.setOnBeforeWaitForNewMasterAndRetry(() -> {}); shardStateAction.setOnAfterWaitForNewMasterAndRetry(() -> {}); diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java index 1f98275aee7..1c61292d87c 100644 --- a/core/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java @@ -674,7 +674,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase { } else { try { indexNameExpressionResolver.concreteIndices(context, "Foo*"); - fail("expecting exeption when result empty and allowNoIndicec=false"); + fail("expecting exception when result empty and allowNoIndicec=false"); } catch (IndexNotFoundException e) { // expected exception } diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java index d37aee16a11..b5ce103aeed 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java +++ 
b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/ClusterRebalanceRoutingTests.java @@ -713,7 +713,7 @@ public class ClusterRebalanceRoutingTests extends ESAllocationTestCase { assertThat(routingTable.index("test1").shard(i).primaryShard().state(), equalTo(INITIALIZING)); } - logger.debug("now start initializing shards and expect exactly one rebalance from node1 to node 2 sicne index [test] is all on node1"); + logger.debug("now start initializing shards and expect exactly one rebalance from node1 to node 2 since index [test] is all on node1"); routingNodes = clusterState.getRoutingNodes(); routingTable = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState("test1", INITIALIZING)).routingTable(); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java index df0c4a8fa3e..a4cd47bd24b 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/RebalanceAfterActiveTests.java @@ -181,7 +181,7 @@ public class RebalanceAfterActiveTests extends ESAllocationTestCase { } - logger.info("complete relocation, thats it!"); + logger.info("complete relocation, that's it!"); routingNodes = clusterState.getRoutingNodes(); prevRoutingTable = routingTable; routingTable = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable(); diff --git a/core/src/test/java/org/elasticsearch/common/logging/log4j/Log4jESLoggerTests.java b/core/src/test/java/org/elasticsearch/common/logging/ESLoggerTests.java similarity index 81% rename from core/src/test/java/org/elasticsearch/common/logging/log4j/Log4jESLoggerTests.java rename to core/src/test/java/org/elasticsearch/common/logging/ESLoggerTests.java index ed8a5cffbf4..67a6c0555c5 100644 --- 
a/core/src/test/java/org/elasticsearch/common/logging/log4j/Log4jESLoggerTests.java +++ b/core/src/test/java/org/elasticsearch/common/logging/ESLoggerTests.java @@ -17,15 +17,13 @@ * under the License. */ -package org.elasticsearch.common.logging.log4j; +package org.elasticsearch.common.logging; import org.apache.log4j.AppenderSkeleton; import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.apache.log4j.spi.LocationInfo; import org.apache.log4j.spi.LoggingEvent; -import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; @@ -38,7 +36,7 @@ import java.util.List; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; -public class Log4jESLoggerTests extends ESTestCase { +public class ESLoggerTests extends ESTestCase { private ESLogger esTestLogger; private TestAppender testAppender; @@ -49,7 +47,7 @@ public class Log4jESLoggerTests extends ESTestCase { @Override public void setUp() throws Exception { super.setUp(); - this.testLevel = Log4jESLoggerFactory.getLogger("test").getLevel(); + this.testLevel = ESLoggerFactory.getLogger("test").getLevel(); LogConfigurator.reset(); Path configDir = getDataPath("config"); // Need to set custom path.conf so we can use a custom logging.yml file for the test @@ -59,18 +57,18 @@ public class Log4jESLoggerTests extends ESTestCase { .build(); LogConfigurator.configure(settings, true); - esTestLogger = Log4jESLoggerFactory.getLogger("test"); - Logger testLogger = ((Log4jESLogger) esTestLogger).logger(); + esTestLogger = ESLoggerFactory.getLogger("test"); + Logger testLogger = esTestLogger.getLogger(); assertThat(testLogger.getLevel(), equalTo(Level.TRACE)); testAppender = new TestAppender(); testLogger.addAppender(testAppender); // deprecation setup, needs to be set to debug to log - 
deprecationLogger = Log4jESLoggerFactory.getDeprecationLogger("test"); + deprecationLogger = ESLoggerFactory.getDeprecationLogger("test"); deprecationAppender = new TestAppender(); - ESLogger logger = Log4jESLoggerFactory.getLogger("deprecation.test"); + ESLogger logger = ESLoggerFactory.getLogger("deprecation.test"); logger.setLevel("DEBUG"); - (((Log4jESLogger) logger).logger()).addAppender(deprecationAppender); + logger.getLogger().addAppender(deprecationAppender); } @Override @@ -78,9 +76,9 @@ public class Log4jESLoggerTests extends ESTestCase { public void tearDown() throws Exception { super.tearDown(); esTestLogger.setLevel(testLevel); - Logger testLogger = ((Log4jESLogger) esTestLogger).logger(); + Logger testLogger = esTestLogger.getLogger(); testLogger.removeAppender(testAppender); - Logger deprecationLogger = ((Log4jESLogger) Log4jESLoggerFactory.getLogger("deprecation.test")).logger(); + Logger deprecationLogger = ESLoggerFactory.getLogger("deprecation.test").getLogger(); deprecationLogger.removeAppender(deprecationAppender); } @@ -99,7 +97,7 @@ public class Log4jESLoggerTests extends ESTestCase { assertThat(event.getRenderedMessage(), equalTo("This is an error")); LocationInfo locationInfo = event.getLocationInformation(); assertThat(locationInfo, notNullValue()); - assertThat(locationInfo.getClassName(), equalTo(Log4jESLoggerTests.class.getCanonicalName())); + assertThat(locationInfo.getClassName(), equalTo(ESLoggerTests.class.getCanonicalName())); assertThat(locationInfo.getMethodName(), equalTo("testLocationInfoTest")); event = events.get(1); assertThat(event, notNullValue()); @@ -107,7 +105,7 @@ public class Log4jESLoggerTests extends ESTestCase { assertThat(event.getRenderedMessage(), equalTo("This is a warning")); locationInfo = event.getLocationInformation(); assertThat(locationInfo, notNullValue()); - assertThat(locationInfo.getClassName(), equalTo(Log4jESLoggerTests.class.getCanonicalName())); + assertThat(locationInfo.getClassName(), 
equalTo(ESLoggerTests.class.getCanonicalName())); assertThat(locationInfo.getMethodName(), equalTo("testLocationInfoTest")); event = events.get(2); assertThat(event, notNullValue()); @@ -115,7 +113,7 @@ public class Log4jESLoggerTests extends ESTestCase { assertThat(event.getRenderedMessage(), equalTo("This is an info")); locationInfo = event.getLocationInformation(); assertThat(locationInfo, notNullValue()); - assertThat(locationInfo.getClassName(), equalTo(Log4jESLoggerTests.class.getCanonicalName())); + assertThat(locationInfo.getClassName(), equalTo(ESLoggerTests.class.getCanonicalName())); assertThat(locationInfo.getMethodName(), equalTo("testLocationInfoTest")); event = events.get(3); assertThat(event, notNullValue()); @@ -123,7 +121,7 @@ public class Log4jESLoggerTests extends ESTestCase { assertThat(event.getRenderedMessage(), equalTo("This is a debug")); locationInfo = event.getLocationInformation(); assertThat(locationInfo, notNullValue()); - assertThat(locationInfo.getClassName(), equalTo(Log4jESLoggerTests.class.getCanonicalName())); + assertThat(locationInfo.getClassName(), equalTo(ESLoggerTests.class.getCanonicalName())); assertThat(locationInfo.getMethodName(), equalTo("testLocationInfoTest")); event = events.get(4); assertThat(event, notNullValue()); @@ -131,7 +129,7 @@ public class Log4jESLoggerTests extends ESTestCase { assertThat(event.getRenderedMessage(), equalTo("This is a trace")); locationInfo = event.getLocationInformation(); assertThat(locationInfo, notNullValue()); - assertThat(locationInfo.getClassName(), equalTo(Log4jESLoggerTests.class.getCanonicalName())); + assertThat(locationInfo.getClassName(), equalTo(ESLoggerTests.class.getCanonicalName())); assertThat(locationInfo.getMethodName(), equalTo("testLocationInfoTest")); } diff --git a/core/src/test/java/org/elasticsearch/common/logging/log4j/LoggingConfigurationTests.java b/core/src/test/java/org/elasticsearch/common/logging/LoggingConfigurationTests.java similarity index 90% rename 
from core/src/test/java/org/elasticsearch/common/logging/log4j/LoggingConfigurationTests.java rename to core/src/test/java/org/elasticsearch/common/logging/LoggingConfigurationTests.java index 5d90edaf7a5..a6dda573304 100644 --- a/core/src/test/java/org/elasticsearch/common/logging/log4j/LoggingConfigurationTests.java +++ b/core/src/test/java/org/elasticsearch/common/logging/LoggingConfigurationTests.java @@ -17,12 +17,11 @@ * under the License. */ -package org.elasticsearch.common.logging.log4j; +package org.elasticsearch.common.logging; import org.apache.log4j.Appender; import org.apache.log4j.Logger; import org.elasticsearch.common.cli.CliToolTestCase; -import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.node.internal.InternalSettingsPreparer; @@ -50,7 +49,7 @@ public class LoggingConfigurationTests extends ESTestCase { } public void testResolveMultipleConfigs() throws Exception { - String level = Log4jESLoggerFactory.getLogger("test").getLevel(); + String level = ESLoggerFactory.getLogger("test").getLevel(); try { Path configDir = getDataPath("config"); Settings settings = Settings.builder() @@ -59,22 +58,22 @@ public class LoggingConfigurationTests extends ESTestCase { .build(); LogConfigurator.configure(settings, true); - ESLogger esLogger = Log4jESLoggerFactory.getLogger("test"); - Logger logger = ((Log4jESLogger) esLogger).logger(); + ESLogger esLogger = ESLoggerFactory.getLogger("test"); + Logger logger = esLogger.getLogger(); Appender appender = logger.getAppender("console"); assertThat(appender, notNullValue()); - esLogger = Log4jESLoggerFactory.getLogger("second"); - logger = ((Log4jESLogger) esLogger).logger(); + esLogger = ESLoggerFactory.getLogger("second"); + logger = esLogger.getLogger(); appender = logger.getAppender("console2"); assertThat(appender, notNullValue()); - esLogger = Log4jESLoggerFactory.getLogger("third"); - logger = 
((Log4jESLogger) esLogger).logger(); + esLogger = ESLoggerFactory.getLogger("third"); + logger = esLogger.getLogger(); appender = logger.getAppender("console3"); assertThat(appender, notNullValue()); } finally { - Log4jESLoggerFactory.getLogger("test").setLevel(level); + ESLoggerFactory.getLogger("test").setLevel(level); } } @@ -166,8 +165,8 @@ public class LoggingConfigurationTests extends ESTestCase { .build(), new CliToolTestCase.MockTerminal()); LogConfigurator.configure(environment.settings(), true); // args should overwrite whatever is in the config - ESLogger esLogger = Log4jESLoggerFactory.getLogger("test_resolve_order"); - Logger logger = ((Log4jESLogger) esLogger).logger(); + ESLogger esLogger = ESLoggerFactory.getLogger("test_resolve_order"); + Logger logger = esLogger.getLogger(); Appender appender = logger.getAppender("console"); assertThat(appender, notNullValue()); assertTrue(logger.isTraceEnabled()); @@ -190,10 +189,10 @@ public class LoggingConfigurationTests extends ESTestCase { .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(), new CliToolTestCase.MockTerminal()); LogConfigurator.configure(environment.settings(), false); - ESLogger esLogger = Log4jESLoggerFactory.getLogger("test_config_not_read"); + ESLogger esLogger = ESLoggerFactory.getLogger("test_config_not_read"); assertNotNull(esLogger); - Logger logger = ((Log4jESLogger) esLogger).logger(); + Logger logger = esLogger.getLogger(); Appender appender = logger.getAppender("console"); // config was not read assertNull(appender); diff --git a/core/src/test/java/org/elasticsearch/common/logging/jdk/JDKESLoggerTests.java b/core/src/test/java/org/elasticsearch/common/logging/jdk/JDKESLoggerTests.java deleted file mode 100644 index 92dd9ffc012..00000000000 --- a/core/src/test/java/org/elasticsearch/common/logging/jdk/JDKESLoggerTests.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.logging.jdk; - -import org.elasticsearch.common.logging.ESLogger; -import org.elasticsearch.test.ESTestCase; - -import java.util.ArrayList; -import java.util.List; -import java.util.logging.Handler; -import java.util.logging.Level; -import java.util.logging.LogRecord; -import java.util.logging.Logger; - -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.notNullValue; - -public class JDKESLoggerTests extends ESTestCase { - - private ESLogger esTestLogger; - private TestHandler testHandler; - - @Override - public void setUp() throws Exception { - super.setUp(); - - JdkESLoggerFactory esTestLoggerFactory = new JdkESLoggerFactory(); - esTestLogger = esTestLoggerFactory.newInstance("test"); - Logger testLogger = ((JdkESLogger) esTestLogger).logger(); - testLogger.setLevel(Level.FINEST); - assertThat(testLogger.getLevel(), equalTo(Level.FINEST)); - testHandler = new TestHandler(); - testLogger.addHandler(testHandler); - } - - public void testLocationInfoTest() { - esTestLogger.error("This is an error"); - esTestLogger.warn("This is a warning"); - esTestLogger.info("This is an info"); - esTestLogger.debug("This is a debug"); - esTestLogger.trace("This is a trace"); - List records = 
testHandler.getEvents(); - assertThat(records, notNullValue()); - assertThat(records.size(), equalTo(5)); - LogRecord record = records.get(0); - assertThat(record, notNullValue()); - assertThat(record.getLevel(), equalTo(Level.SEVERE)); - assertThat(record.getMessage(), equalTo("This is an error")); - assertThat(record.getSourceClassName(), equalTo(JDKESLoggerTests.class.getCanonicalName())); - assertThat(record.getSourceMethodName(), equalTo("testLocationInfoTest")); - record = records.get(1); - assertThat(record, notNullValue()); - assertThat(record.getLevel(), equalTo(Level.WARNING)); - assertThat(record.getMessage(), equalTo("This is a warning")); - assertThat(record.getSourceClassName(), equalTo(JDKESLoggerTests.class.getCanonicalName())); - assertThat(record.getSourceMethodName(), equalTo("testLocationInfoTest")); - record = records.get(2); - assertThat(record, notNullValue()); - assertThat(record.getLevel(), equalTo(Level.INFO)); - assertThat(record.getMessage(), equalTo("This is an info")); - assertThat(record.getSourceClassName(), equalTo(JDKESLoggerTests.class.getCanonicalName())); - assertThat(record.getSourceMethodName(), equalTo("testLocationInfoTest")); - record = records.get(3); - assertThat(record, notNullValue()); - assertThat(record.getLevel(), equalTo(Level.FINE)); - assertThat(record.getMessage(), equalTo("This is a debug")); - assertThat(record.getSourceClassName(), equalTo(JDKESLoggerTests.class.getCanonicalName())); - assertThat(record.getSourceMethodName(), equalTo("testLocationInfoTest")); - record = records.get(4); - assertThat(record, notNullValue()); - assertThat(record.getLevel(), equalTo(Level.FINEST)); - assertThat(record.getMessage(), equalTo("This is a trace")); - assertThat(record.getSourceClassName(), equalTo(JDKESLoggerTests.class.getCanonicalName())); - assertThat(record.getSourceMethodName(), equalTo("testLocationInfoTest")); - } - - public void testSetLogLevelString() { - // verify the string based level-setters - 
esTestLogger.setLevel("error"); - assertThat(esTestLogger.getLevel(), equalTo("SEVERE")); - esTestLogger.setLevel("warn"); - assertThat(esTestLogger.getLevel(), equalTo("WARNING")); - esTestLogger.setLevel("info"); - assertThat(esTestLogger.getLevel(), equalTo("INFO")); - esTestLogger.setLevel("debug"); - assertThat(esTestLogger.getLevel(), equalTo("FINE")); - esTestLogger.setLevel("trace"); - assertThat(esTestLogger.getLevel(), equalTo("FINEST")); - } - - private static class TestHandler extends Handler { - - private List records = new ArrayList<>(); - - @Override - public void close() { - } - - public List getEvents() { - return records; - } - - @Override - public void publish(LogRecord record) { - // Forces it to generate the location information - record.getSourceClassName(); - records.add(record); - } - - @Override - public void flush() { - } - } -} diff --git a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java index 58f5cde65ce..6cc9912924d 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java @@ -31,9 +31,12 @@ import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; +import java.util.LinkedHashSet; import java.util.List; +import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Function; public class ScopedSettingsTests extends ESTestCase { @@ -299,4 +302,25 @@ public class ScopedSettingsTests extends ESTestCase { ESLoggerFactory.getRootLogger().setLevel(level); } } + + public void testOverlappingComplexMatchSettings() { + Set> settings = new LinkedHashSet<>(2); + final boolean groupFirst = randomBoolean(); + final Setting groupSetting = Setting.groupSetting("foo.", false, 
Setting.Scope.CLUSTER); + final Setting listSetting = Setting.listSetting("foo.bar", Collections.emptyList(), Function.identity(), false, + Setting.Scope.CLUSTER); + settings.add(groupFirst ? groupSetting : listSetting); + settings.add(groupFirst ? listSetting : groupSetting); + + try { + new ClusterSettings(Settings.EMPTY, settings); + fail("an exception should have been thrown because settings overlap"); + } catch (IllegalArgumentException e) { + if (groupFirst) { + assertEquals("complex setting key: [foo.bar] overlaps existing setting key: [foo.]", e.getMessage()); + } else { + assertEquals("complex setting key: [foo.] overlaps existing setting key: [foo.bar]", e.getMessage()); + } + } + } } diff --git a/core/src/test/java/org/elasticsearch/common/settings/loader/JsonSettingsLoaderTests.java b/core/src/test/java/org/elasticsearch/common/settings/loader/JsonSettingsLoaderTests.java index 18591d9a592..d7f10891f28 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/loader/JsonSettingsLoaderTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/loader/JsonSettingsLoaderTests.java @@ -58,7 +58,7 @@ public class JsonSettingsLoaderTests extends ESTestCase { fail("expected exception"); } catch (SettingsException e) { assertEquals(e.getCause().getClass(), ElasticsearchParseException.class); - assertTrue(e.toString().contains("duplicate settings key [foo] found at line number [1], column number [13], previous value [bar], current value [baz]")); + assertTrue(e.toString().contains("duplicate settings key [foo] found at line number [1], column number [20], previous value [bar], current value [baz]")); } } } diff --git a/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java b/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java index f0c3bab86f0..65484b81c79 100644 --- a/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java +++ 
b/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java @@ -108,6 +108,7 @@ public class ZenFaultDetectionTests extends ESTestCase { MockTransportService transportService = new MockTransportService(Settings.EMPTY, new LocalTransport(settings, threadPool, version, namedWriteableRegistry), threadPool, namedWriteableRegistry); transportService.start(); + transportService.acceptIncomingRequests(); return transportService; } diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java b/core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java index f44faea19a8..b247dad069e 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/ping/unicast/UnicastZenPingIT.java @@ -58,12 +58,14 @@ public class UnicastZenPingIT extends ESTestCase { NettyTransport transportA = new NettyTransport(settings, threadPool, networkService, BigArrays.NON_RECYCLING_INSTANCE, Version.CURRENT, new NamedWriteableRegistry()); final TransportService transportServiceA = new TransportService(transportA, threadPool).start(); + transportServiceA.acceptIncomingRequests(); final DiscoveryNode nodeA = new DiscoveryNode("UZP_A", transportServiceA.boundAddress().publishAddress(), Version.CURRENT); InetSocketTransportAddress addressA = (InetSocketTransportAddress) transportA.boundAddress().publishAddress(); NettyTransport transportB = new NettyTransport(settings, threadPool, networkService, BigArrays.NON_RECYCLING_INSTANCE, Version.CURRENT, new NamedWriteableRegistry()); final TransportService transportServiceB = new TransportService(transportB, threadPool).start(); + transportServiceB.acceptIncomingRequests(); final DiscoveryNode nodeB = new DiscoveryNode("UZP_B", transportServiceA.boundAddress().publishAddress(), Version.CURRENT); InetSocketTransportAddress addressB = (InetSocketTransportAddress) 
transportB.boundAddress().publishAddress(); diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PendingClusterStatesQueueTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PendingClusterStatesQueueTests.java index dfaf407f850..72f258e5fca 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PendingClusterStatesQueueTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PendingClusterStatesQueueTests.java @@ -112,7 +112,7 @@ public class PendingClusterStatesQueueTests extends ESTestCase { for (ClusterStateContext context : queue.pendingStates) { final String pendingMaster = context.state.nodes().masterNodeId(); assertThat("found a cluster state from [" + pendingMaster - + "], after a state from [" + processedMaster + "] was proccessed", + + "], after a state from [" + processedMaster + "] was processed", pendingMaster, equalTo(processedMaster)); } // and check all committed contexts from another master were failed diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java index 2658af1b776..224ecbdf619 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java @@ -232,6 +232,7 @@ public class PublishClusterStateActionTests extends ESTestCase { protected MockTransportService buildTransportService(Settings settings, Version version) { MockTransportService transportService = MockTransportService.local(Settings.EMPTY, version, threadPool); transportService.start(); + transportService.acceptIncomingRequests(); return transportService; } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java 
b/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java index b6a7dee23ba..140fa11c810 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/date/SimpleDateMappingTests.java @@ -156,7 +156,7 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase { .startObject() .field("date_field_en", "Wed, 06 Dec 2000 02:55:00 -0800") .field("date_field_de", "Mi, 06 Dez 2000 02:55:00 -0800") - .field("date_field_default", "Wed, 06 Dec 2000 02:55:00 -0800") // check default - no exception is a successs! + .field("date_field_default", "Wed, 06 Dec 2000 02:55:00 -0800") // check default - no exception is a success! .endObject() .bytes()); assertNumericTokensEqual(doc, defaultMapper, "date_field_en", "date_field_de"); diff --git a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java index 9fe8995e65f..8881624c206 100644 --- a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java @@ -525,7 +525,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { try { createIndex("test"); - fail("index creation should have failed due to alias with existing index name in mathching index template"); + fail("index creation should have failed due to alias with existing index name in matching index template"); } catch(InvalidAliasNameException e) { assertThat(e.getMessage(), equalTo("Invalid alias name [index], an index exists with the same name as the alias")); } diff --git a/core/src/test/java/org/elasticsearch/test/NoopDiscovery.java b/core/src/test/java/org/elasticsearch/test/NoopDiscovery.java index 9c03147ca1a..9e27e84e66a 100644 --- a/core/src/test/java/org/elasticsearch/test/NoopDiscovery.java +++ 
b/core/src/test/java/org/elasticsearch/test/NoopDiscovery.java @@ -72,6 +72,11 @@ public class NoopDiscovery implements Discovery { return null; } + @Override + public void startInitialJoin() { + + } + @Override public int getMinimumMasterNodes() { return -1; diff --git a/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index 19254c12706..f65ce317d72 100644 --- a/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/core/src/test/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -40,6 +40,7 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import static java.util.Collections.emptyMap; @@ -74,11 +75,13 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { Settings.builder().put("name", "TS_A", TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "", TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING").build(), version0, new NamedWriteableRegistry() ); + serviceA.acceptIncomingRequests(); nodeA = new DiscoveryNode("TS_A", "TS_A", serviceA.boundAddress().publishAddress(), emptyMap(), version0); serviceB = build( Settings.builder().put("name", "TS_B", TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "", TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING").build(), version1, new NamedWriteableRegistry() ); + serviceB.acceptIncomingRequests(); nodeB = new DiscoveryNode("TS_B", "TS_B", serviceB.boundAddress().publishAddress(), emptyMap(), version1); // wait till all nodes are properly connected and the event has been sent, so tests in this class @@ -1254,6 +1257,54 @@ public abstract class AbstractSimpleTransportTestCase 
extends ESTestCase { assertTrue(nodeB.address().sameHost(addressB.get())); } + public void testBlockingIncomingRequests() throws Exception { + TransportService service = build( + Settings.builder().put("name", "TS_TEST", TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "", TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING").build(), + version0, new NamedWriteableRegistry() + ); + AtomicBoolean requestProcessed = new AtomicBoolean(); + service.registerRequestHandler("action", TestRequest::new, ThreadPool.Names.SAME, + (request, channel) -> { + requestProcessed.set(true); + channel.sendResponse(TransportResponse.Empty.INSTANCE); + }); + + DiscoveryNode node = new DiscoveryNode("TS_TEST", "TS_TEST", service.boundAddress().publishAddress(), emptyMap(), version0); + serviceA.connectToNode(node); + + CountDownLatch latch = new CountDownLatch(1); + serviceA.sendRequest(node, "action", new TestRequest(), new TransportResponseHandler() { + @Override + public TestResponse newInstance() { + return new TestResponse(); + } + + @Override + public void handleResponse(TestResponse response) { + latch.countDown(); + } + + @Override + public void handleException(TransportException exp) { + latch.countDown(); + } + + @Override + public String executor() { + return ThreadPool.Names.SAME; + } + }); + + assertFalse(requestProcessed.get()); + + service.acceptIncomingRequests(); + assertBusy(() -> assertTrue(requestProcessed.get())); + + latch.await(); + service.close(); + + } + public static class TestRequest extends TransportRequest { } diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyScheduledPingTests.java b/core/src/test/java/org/elasticsearch/transport/netty/NettyScheduledPingTests.java index 4c7be93491f..3d6ff1cd067 100644 --- a/core/src/test/java/org/elasticsearch/transport/netty/NettyScheduledPingTests.java +++ b/core/src/test/java/org/elasticsearch/transport/netty/NettyScheduledPingTests.java @@ -56,11 +56,13 @@ public class 
NettyScheduledPingTests extends ESTestCase { final NettyTransport nettyA = new NettyTransport(settings, threadPool, new NetworkService(settings), BigArrays.NON_RECYCLING_INSTANCE, Version.CURRENT, registryA); MockTransportService serviceA = new MockTransportService(settings, nettyA, threadPool, registryA); serviceA.start(); + serviceA.acceptIncomingRequests(); NamedWriteableRegistry registryB = new NamedWriteableRegistry(); final NettyTransport nettyB = new NettyTransport(settings, threadPool, new NetworkService(settings), BigArrays.NON_RECYCLING_INSTANCE, Version.CURRENT, registryB); MockTransportService serviceB = new MockTransportService(settings, nettyB, threadPool, registryB); serviceB.start(); + serviceB.acceptIncomingRequests(); DiscoveryNode nodeA = new DiscoveryNode("TS_A", "TS_A", serviceA.boundAddress().publishAddress(), emptyMap(), Version.CURRENT); DiscoveryNode nodeB = new DiscoveryNode("TS_B", "TS_B", serviceB.boundAddress().publishAddress(), emptyMap(), Version.CURRENT); diff --git a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java index a8d06d39f59..ef408d16784 100644 --- a/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java +++ b/core/src/test/java/org/elasticsearch/transport/netty/NettyTransportIT.java @@ -83,7 +83,7 @@ public class NettyTransportIT extends ESIntegTestCase { assertThat(clusterIndexHealths.getStatus(), is(ClusterHealthStatus.GREEN)); try { transportClient.filterWithHeader(Collections.singletonMap("ERROR", "MY MESSAGE")).admin().cluster().prepareHealth().get(); - fail("Expected exception, but didnt happen"); + fail("Expected exception, but didn't happen"); } catch (ElasticsearchException e) { assertThat(e.getMessage(), containsString("MY MESSAGE")); assertThat(channelProfileName, is(TransportSettings.DEFAULT_PROFILE)); diff --git 
a/core/src/test/resources/org/elasticsearch/common/logging/log4j/config/logging.yml b/core/src/test/resources/org/elasticsearch/common/logging/config/logging.yml similarity index 100% rename from core/src/test/resources/org/elasticsearch/common/logging/log4j/config/logging.yml rename to core/src/test/resources/org/elasticsearch/common/logging/config/logging.yml diff --git a/core/src/test/resources/org/elasticsearch/common/logging/log4j/config/test2/logging.yml b/core/src/test/resources/org/elasticsearch/common/logging/config/test2/logging.yml similarity index 100% rename from core/src/test/resources/org/elasticsearch/common/logging/log4j/config/test2/logging.yml rename to core/src/test/resources/org/elasticsearch/common/logging/config/test2/logging.yml diff --git a/core/src/test/resources/org/elasticsearch/common/logging/log4j/config/test2/test3/logging.yml b/core/src/test/resources/org/elasticsearch/common/logging/config/test2/test3/logging.yml similarity index 100% rename from core/src/test/resources/org/elasticsearch/common/logging/log4j/config/test2/test3/logging.yml rename to core/src/test/resources/org/elasticsearch/common/logging/config/test2/test3/logging.yml diff --git a/distribution/build.gradle b/distribution/build.gradle index dffb4ef12b2..d70f0254f3b 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -39,7 +39,7 @@ buildscript { } // this is common configuration for distributions, but we also add it here for the license check to use -ext.dependencyFiles = project(':core').configurations.runtime.copyRecursive().exclude(module: 'slf4j-api') +ext.dependencyFiles = project(':core').configurations.runtime.copyRecursive() /***************************************************************************** diff --git a/distribution/deb/src/test/java/org/elasticsearch/test/rest/RestIT.java b/distribution/deb/src/test/java/org/elasticsearch/test/rest/RestIT.java index 5537bbfa646..2d90db2c5b7 100644 --- 
a/distribution/deb/src/test/java/org/elasticsearch/test/rest/RestIT.java +++ b/distribution/deb/src/test/java/org/elasticsearch/test/rest/RestIT.java @@ -24,12 +24,12 @@ import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -/** Rest integration test. runs against external cluster in 'mvn verify' */ +/** Rest integration test. Runs against a cluster started by {@code gradle integTest} */ public class RestIT extends ESRestTestCase { public RestIT(RestTestCandidate testCandidate) { super(testCandidate); } - // we run them all sequentially: start simple! + @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { return createParameters(0, 1); diff --git a/distribution/integ-test-zip/src/test/java/org/elasticsearch/test/rest/RestIT.java b/distribution/integ-test-zip/src/test/java/org/elasticsearch/test/rest/RestIT.java index 5537bbfa646..2d90db2c5b7 100644 --- a/distribution/integ-test-zip/src/test/java/org/elasticsearch/test/rest/RestIT.java +++ b/distribution/integ-test-zip/src/test/java/org/elasticsearch/test/rest/RestIT.java @@ -24,12 +24,12 @@ import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -/** Rest integration test. runs against external cluster in 'mvn verify' */ +/** Rest integration test. Runs against a cluster started by {@code gradle integTest} */ public class RestIT extends ESRestTestCase { public RestIT(RestTestCandidate testCandidate) { super(testCandidate); } - // we run them all sequentially: start simple! 
+ @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { return createParameters(0, 1); diff --git a/distribution/licenses/jackson-core-2.6.2.jar.sha1 b/distribution/licenses/jackson-core-2.6.2.jar.sha1 deleted file mode 100644 index 87d28eb9363..00000000000 --- a/distribution/licenses/jackson-core-2.6.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -123f29333b2c6b3516b14252b6e93226bfcd6e37 diff --git a/distribution/licenses/jackson-core-2.7.1.jar.sha1 b/distribution/licenses/jackson-core-2.7.1.jar.sha1 new file mode 100644 index 00000000000..73831ed2d51 --- /dev/null +++ b/distribution/licenses/jackson-core-2.7.1.jar.sha1 @@ -0,0 +1 @@ +4127b62db028f981e81caa248953c0899d720f98 \ No newline at end of file diff --git a/distribution/licenses/jackson-dataformat-cbor-2.6.2.jar.sha1 b/distribution/licenses/jackson-dataformat-cbor-2.6.2.jar.sha1 deleted file mode 100644 index 66757dbc917..00000000000 --- a/distribution/licenses/jackson-dataformat-cbor-2.6.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1e13c575f914c83761bb8e2aca7dfd9e4c647579 diff --git a/distribution/licenses/jackson-dataformat-cbor-2.7.1.jar.sha1 b/distribution/licenses/jackson-dataformat-cbor-2.7.1.jar.sha1 new file mode 100644 index 00000000000..19bb5e64610 --- /dev/null +++ b/distribution/licenses/jackson-dataformat-cbor-2.7.1.jar.sha1 @@ -0,0 +1 @@ +4282418817ad2be26ce18739461499eae679390f \ No newline at end of file diff --git a/distribution/licenses/jackson-dataformat-smile-2.6.2.jar.sha1 b/distribution/licenses/jackson-dataformat-smile-2.6.2.jar.sha1 deleted file mode 100644 index 2ae62f6111e..00000000000 --- a/distribution/licenses/jackson-dataformat-smile-2.6.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -395d18c1a1dd730b8026ee59c4067e5d2b45ba6e diff --git a/distribution/licenses/jackson-dataformat-smile-2.7.1.jar.sha1 b/distribution/licenses/jackson-dataformat-smile-2.7.1.jar.sha1 new file mode 100644 index 00000000000..45c78df1ba3 --- /dev/null +++ 
b/distribution/licenses/jackson-dataformat-smile-2.7.1.jar.sha1 @@ -0,0 +1 @@ +9ccde45d574388371d2c4032d4b853e2d596777e \ No newline at end of file diff --git a/distribution/licenses/jackson-dataformat-yaml-2.6.2.jar.sha1 b/distribution/licenses/jackson-dataformat-yaml-2.6.2.jar.sha1 deleted file mode 100644 index 385f04d4db3..00000000000 --- a/distribution/licenses/jackson-dataformat-yaml-2.6.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4ae23088dd3fae47c66843f2e4251d7255ee140e diff --git a/distribution/licenses/jackson-dataformat-yaml-2.7.1.jar.sha1 b/distribution/licenses/jackson-dataformat-yaml-2.7.1.jar.sha1 new file mode 100644 index 00000000000..01b442577b1 --- /dev/null +++ b/distribution/licenses/jackson-dataformat-yaml-2.7.1.jar.sha1 @@ -0,0 +1 @@ +6c5235a523b7d720b2b0e1b850ea14083e342b07 \ No newline at end of file diff --git a/distribution/rpm/src/test/java/org/elasticsearch/test/rest/RestIT.java b/distribution/rpm/src/test/java/org/elasticsearch/test/rest/RestIT.java index 5537bbfa646..2d90db2c5b7 100644 --- a/distribution/rpm/src/test/java/org/elasticsearch/test/rest/RestIT.java +++ b/distribution/rpm/src/test/java/org/elasticsearch/test/rest/RestIT.java @@ -24,12 +24,12 @@ import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -/** Rest integration test. runs against external cluster in 'mvn verify' */ +/** Rest integration test. Runs against a cluster started by {@code gradle integTest} */ public class RestIT extends ESRestTestCase { public RestIT(RestTestCandidate testCandidate) { super(testCandidate); } - // we run them all sequentially: start simple! 
+ @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { return createParameters(0, 1); diff --git a/distribution/tar/src/test/java/org/elasticsearch/test/rest/RestIT.java b/distribution/tar/src/test/java/org/elasticsearch/test/rest/RestIT.java index 5537bbfa646..2d90db2c5b7 100644 --- a/distribution/tar/src/test/java/org/elasticsearch/test/rest/RestIT.java +++ b/distribution/tar/src/test/java/org/elasticsearch/test/rest/RestIT.java @@ -24,12 +24,12 @@ import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -/** Rest integration test. runs against external cluster in 'mvn verify' */ +/** Rest integration test. Runs against a cluster started by {@code gradle integTest} */ public class RestIT extends ESRestTestCase { public RestIT(RestTestCandidate testCandidate) { super(testCandidate); } - // we run them all sequentially: start simple! + @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { return createParameters(0, 1); diff --git a/distribution/zip/src/test/java/org/elasticsearch/test/rest/RestIT.java b/distribution/zip/src/test/java/org/elasticsearch/test/rest/RestIT.java index 5537bbfa646..2d90db2c5b7 100644 --- a/distribution/zip/src/test/java/org/elasticsearch/test/rest/RestIT.java +++ b/distribution/zip/src/test/java/org/elasticsearch/test/rest/RestIT.java @@ -24,12 +24,12 @@ import org.elasticsearch.test.rest.parser.RestTestParseException; import java.io.IOException; -/** Rest integration test. runs against external cluster in 'mvn verify' */ +/** Rest integration test. Runs against a cluster started by {@code gradle integTest} */ public class RestIT extends ESRestTestCase { public RestIT(RestTestCandidate testCandidate) { super(testCandidate); } - // we run them all sequentially: start simple! 
+ @ParametersFactory public static Iterable parameters() throws IOException, RestTestParseException { return createParameters(0, 1); diff --git a/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc b/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc index 0487eb25d0d..510cd7caae5 100644 --- a/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc +++ b/docs/reference/aggregations/pipeline/movavg-aggregation.asciidoc @@ -351,7 +351,7 @@ you can disable this behavior with `pad: false` All the moving average model support a "prediction" mode, which will attempt to extrapolate into the future given the current smoothed, moving average. Depending on the model and parameter, these predictions may or may not be accurate. -Predictions are enabled by adding a `predict` parameter to any moving average aggregation, specifying the nubmer of +Predictions are enabled by adding a `predict` parameter to any moving average aggregation, specifying the number of predictions you would like appended to the end of the series. These predictions will be spaced out at the same interval as your buckets: diff --git a/docs/reference/ingest/ingest-node.asciidoc b/docs/reference/ingest/ingest-node.asciidoc index 630a557bb15..923cb61ff54 100644 --- a/docs/reference/ingest/ingest-node.asciidoc +++ b/docs/reference/ingest/ingest-node.asciidoc @@ -928,8 +928,8 @@ TIME (?!<[0-9])%{HOUR}:%{MINUTE}(?::%{SECOND})(?![0-9]) [options="header"] |====== | Name | Required | Default | Description -| `match_field` | yes | - | The field to use for grok expression parsing -| `match_pattern` | yes | - | The grok expression to match and extract named captures with +| `field` | yes | - | The field to use for grok expression parsing +| `pattern` | yes | - | The grok expression to match and extract named captures with | `pattern_definitions` | no | - | A map of pattern-name and pattern tuples defining custom patterns to be used by the current processor. 
Patterns matching existing names will override the pre-existing definition. |====== @@ -959,8 +959,8 @@ An example pipeline for processing the above document using Grok: "processors": [ { "grok": { - "match_field": "message", - "match_pattern": "%{IP:client} %{WORD:method} %{URIPATHPARAM:request} %{NUMBER:bytes} %{NUMBER:duration}" + "field": "message", + "pattern": "%{IP:client} %{WORD:method} %{URIPATHPARAM:request} %{NUMBER:bytes} %{NUMBER:duration}" } } ] @@ -990,8 +990,8 @@ An example of a pipeline specifying custom pattern definitions: "processors": [ { "grok": { - "match_field": "message", - "match_pattern": "my %{FAVORITE_DOG:dog} is colored %{RGB:color}" + "field": "message", + "pattern": "my %{FAVORITE_DOG:dog} is colored %{RGB:color}", "pattern_definitions" : { "FAVORITE_DOG" : "beagle", "RGB" : "RED|GREEN|BLUE" diff --git a/docs/reference/migration/migrate_2_0/mapping.asciidoc b/docs/reference/migration/migrate_2_0/mapping.asciidoc index 24c56f4b254..b4ee0d54412 100644 --- a/docs/reference/migration/migrate_2_0/mapping.asciidoc +++ b/docs/reference/migration/migrate_2_0/mapping.asciidoc @@ -278,7 +278,7 @@ been removed in favour of just using the `analyzer` setting. If just the `analyzer` is set, it will be used at index time and at search time. To use a different analyzer at search time, specify both the `analyzer` and a `search_analyzer`. The `index_analyzer`, `search_analyzer`, and `analyzer` type-level settings -have also been removed, as is is no longer possible to select fields based on +have also been removed, as it is no longer possible to select fields based on the type name.
The `_analyzer` meta-field, which allowed setting an analyzer per document has diff --git a/docs/reference/migration/migrate_3_0.asciidoc b/docs/reference/migration/migrate_3_0.asciidoc index c66508d3026..9144d8fc59e 100644 --- a/docs/reference/migration/migrate_3_0.asciidoc +++ b/docs/reference/migration/migrate_3_0.asciidoc @@ -323,6 +323,13 @@ hatch of setting the system property "es.netty.gathering" to "false". Time has proven enabling gathering by default is a non-issue and this non-documented setting has been removed. +==== Removed es.useLinkedTransferQueue + +The system property `es.useLinkedTransferQueue` could be used to +control the queue implementation used in the cluster service and the +handling of ping responses during discovery. This was an undocumented +setting and has been removed. + [[breaking_30_mapping_changes]] === Mapping changes diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/TestIndexableBinaryStringTools.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/TestIndexableBinaryStringTools.java index ff6121f86e4..ab51c62d1cf 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/TestIndexableBinaryStringTools.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/TestIndexableBinaryStringTools.java @@ -43,13 +43,14 @@ import java.util.Locale; public class TestIndexableBinaryStringTools extends LuceneTestCase { private static int NUM_RANDOM_TESTS; private static int MAX_RANDOM_BINARY_LENGTH; - + private static final String LINE_SEPARATOR = System.lineSeparator(); + @BeforeClass public static void beforeClass() throws Exception { NUM_RANDOM_TESTS = atLeast(200); MAX_RANDOM_BINARY_LENGTH = atLeast(300); } - + public void testSingleBinaryRoundTrip() { byte[] binary = new byte[] { (byte) 0x23, (byte) 0x98, (byte) 0x13, (byte) 0xE4, (byte) 0x76, (byte) 0x41, (byte) 0xB2, (byte) 0xC9, @@ -68,15 +69,15 @@ public class TestIndexableBinaryStringTools 
extends LuceneTestCase { decoded.length); assertEquals("Round trip decode/decode returned different results:" - + System.getProperty("line.separator") + "original: " + + LINE_SEPARATOR + "original: " + binaryDump(binary, binary.length) - + System.getProperty("line.separator") + " encoded: " + + LINE_SEPARATOR + " encoded: " + charArrayDump(encoded, encoded.length) - + System.getProperty("line.separator") + " decoded: " + + LINE_SEPARATOR + " decoded: " + binaryDump(decoded, decoded.length), binaryDump(binary, binary.length), binaryDump(decoded, decoded.length)); } - + public void testEncodedSortability() { byte[] originalArray1 = new byte[MAX_RANDOM_BINARY_LENGTH]; char[] originalString1 = new char[MAX_RANDOM_BINARY_LENGTH]; @@ -127,15 +128,15 @@ public class TestIndexableBinaryStringTools extends LuceneTestCase { assertEquals("Test #" + (testNum + 1) + ": Original bytes and encoded chars compare differently:" - + System.getProperty("line.separator") + " binary 1: " + + LINE_SEPARATOR + " binary 1: " + binaryDump(originalArray1, numBytes1) - + System.getProperty("line.separator") + " binary 2: " + + LINE_SEPARATOR + " binary 2: " + binaryDump(original2, numBytes2) - + System.getProperty("line.separator") + "encoded 1: " + + LINE_SEPARATOR + "encoded 1: " + charArrayDump(encoded1, encodedLen1) - + System.getProperty("line.separator") + "encoded 2: " + + LINE_SEPARATOR + "encoded 2: " + charArrayDump(encoded2, encodedLen2) - + System.getProperty("line.separator"), originalComparison, + + LINE_SEPARATOR, originalComparison, encodedComparison); } } @@ -157,7 +158,7 @@ public class TestIndexableBinaryStringTools extends LuceneTestCase { assertEquals("decoded empty input was not empty", decoded.length, 0); } - + public void testAllNullInput() { byte[] binary = new byte[] { 0, 0, 0, 0, 0, 0, 0, 0, 0 }; @@ -174,19 +175,19 @@ public class TestIndexableBinaryStringTools extends LuceneTestCase { decoded.length); assertEquals("Round trip decode/decode returned different 
results:" - + System.getProperty("line.separator") + " original: " + + LINE_SEPARATOR + " original: " + binaryDump(binary, binary.length) - + System.getProperty("line.separator") + "decodedBuf: " + + LINE_SEPARATOR + "decodedBuf: " + binaryDump(decoded, decoded.length), binaryDump(binary, binary.length), binaryDump(decoded, decoded.length)); } - + public void testRandomBinaryRoundTrip() { byte[] binary = new byte[MAX_RANDOM_BINARY_LENGTH]; char[] encoded = new char[MAX_RANDOM_BINARY_LENGTH * 10]; byte[] decoded = new byte[MAX_RANDOM_BINARY_LENGTH]; for (int testNum = 0; testNum < NUM_RANDOM_TESTS; ++testNum) { - int numBytes = random().nextInt(MAX_RANDOM_BINARY_LENGTH - 1) + 1; // Min == 1 + int numBytes = random().nextInt(MAX_RANDOM_BINARY_LENGTH - 1) + 1; // Min == 1 for (int byteNum = 0; byteNum < numBytes; ++byteNum) { binary[byteNum] = (byte) random().nextInt(0x100); @@ -206,15 +207,15 @@ public class TestIndexableBinaryStringTools extends LuceneTestCase { assertEquals("Test #" + (testNum + 1) + ": Round trip decode/decode returned different results:" - + System.getProperty("line.separator") + " original: " - + binaryDump(binary, numBytes) + System.getProperty("line.separator") + + LINE_SEPARATOR + " original: " + + binaryDump(binary, numBytes) + LINE_SEPARATOR + "encodedBuf: " + charArrayDump(encoded, encodedLen) - + System.getProperty("line.separator") + "decodedBuf: " + + LINE_SEPARATOR + "decodedBuf: " + binaryDump(decoded, decodedLen), binaryDump(binary, numBytes), binaryDump(decoded, decodedLen)); } } - + public String binaryDump(byte[] binary, int numBytes) { StringBuilder buf = new StringBuilder(); for (int byteNum = 0 ; byteNum < numBytes ; ++byteNum) { diff --git a/plugins/lang-python/build.gradle b/plugins/lang-python/build.gradle index c541b2a3ed3..0980d7f62c9 100644 --- a/plugins/lang-python/build.gradle +++ b/plugins/lang-python/build.gradle @@ -505,6 +505,8 @@ thirdPartyAudit.excludes = [ 'org.python.objectweb.asm.tree.analysis.BasicVerifier', 
'org.python.objectweb.asm.tree.analysis.Frame', 'org.python.objectweb.asm.tree.analysis.SimpleVerifier', + 'org.slf4j.Logger', + 'org.slf4j.LoggerFactory', 'org.tukaani.xz.ARMOptions', 'org.tukaani.xz.ARMThumbOptions', 'org.tukaani.xz.DeltaOptions', diff --git a/plugins/repository-azure/build.gradle b/plugins/repository-azure/build.gradle index 122bd6dfa80..bcd60abb89e 100644 --- a/plugins/repository-azure/build.gradle +++ b/plugins/repository-azure/build.gradle @@ -35,3 +35,8 @@ dependencyLicenses { mapping from: /stax-.*/, to: 'stax' } +thirdPartyAudit.excludes = [ + // Optional and not enabled by Elasticsearch + 'org.slf4j.Logger', + 'org.slf4j.LoggerFactory', +] diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 924f8cd1d42..915a85ebdc4 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -357,4 +357,8 @@ thirdPartyAudit.excludes = [ // internal java api: sun.misc.SignalHandler 'org.apache.hadoop.util.SignalLogger$Handler', + + // optional dependencies of slf4j-api + 'org.slf4j.impl.StaticMDCBinder', + 'org.slf4j.impl.StaticMarkerBinder', ] diff --git a/plugins/repository-hdfs/licenses/slf4j-api-1.6.2.jar.sha1 b/plugins/repository-hdfs/licenses/slf4j-api-1.6.2.jar.sha1 new file mode 100644 index 00000000000..a2f93ea5580 --- /dev/null +++ b/plugins/repository-hdfs/licenses/slf4j-api-1.6.2.jar.sha1 @@ -0,0 +1 @@ +8619e95939167fb37245b5670135e4feb0ec7d50 \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/slf4j-api-LICENSE.txt b/plugins/repository-hdfs/licenses/slf4j-api-LICENSE.txt new file mode 100644 index 00000000000..8fda22f4d72 --- /dev/null +++ b/plugins/repository-hdfs/licenses/slf4j-api-LICENSE.txt @@ -0,0 +1,21 @@ +Copyright (c) 2004-2014 QOS.ch +All rights reserved. 
+ +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/plugins/repository-hdfs/licenses/slf4j-api-NOTICE.txt b/plugins/repository-hdfs/licenses/slf4j-api-NOTICE.txt new file mode 100644 index 00000000000..e69de29bb2d diff --git a/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java b/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java index e9a6ee0fdd8..7f287f8a58d 100644 --- a/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java +++ b/qa/smoke-test-client/src/test/java/org/elasticsearch/smoketest/ESSmokeClientTestCase.java @@ -48,17 +48,14 @@ import static com.carrotsearch.randomizedtesting.RandomizedTest.randomAsciiOfLen import static org.hamcrest.Matchers.notNullValue; /** - * {@link ESSmokeClientTestCase} is an abstract base class to run integration - * tests against an external Elasticsearch Cluster. 
+ * An abstract base class to run integration tests against an Elasticsearch cluster running outside of the test process. *

- You can define a list of transport addresses from where you can reach your cluster - by setting "tests.cluster" system property. It defaults to "localhost:9300". + You can define a list of transport addresses from where you can reach your cluster by setting "tests.cluster" system + property. It defaults to "localhost:9300". If you run this from `gradle integTest` then it will start the cluster for + you and set up the property. *

- * All tests can be run from maven using mvn install as maven will start an external cluster first. - *

- * If you want to debug this module from your IDE, then start an external cluster by yourself - * then run JUnit. If you changed the default port, set "tests.cluster=localhost:PORT" when running - * your test. + * If you want to debug this module from your IDE, then start an external cluster by yourself, maybe with `gradle run`, + * then run JUnit. If you changed the default port, set "-Dtests.cluster=localhost:PORT" when running your test. */ @LuceneTestCase.SuppressSysoutChecks(bugUrl = "we log a lot on purpose") public abstract class ESSmokeClientTestCase extends LuceneTestCase { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.cancel.json b/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.cancel.json index f36157144e8..14ac9862800 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.cancel.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.cancel.json @@ -4,18 +4,18 @@ "methods": ["POST"], "url": { "path": "/_tasks", - "paths": ["/_tasks/_cancel", "/_tasks/{node_id}/_cancel", "/_tasks/{node_id}/{task_id}/_cancel"], + "paths": ["/_tasks/_cancel", "/_tasks/{task_id}/_cancel"], "parts": { - "node_id": { - "type": "list", - "description": "A comma-separated list of node IDs or names to limit the request; use `_local` to cancel only tasks on the node you're connecting to, leave empty to cancel tasks on all nodes" - }, "task_id": { "type": "number", "description": "Cancel the task with specified id" } }, "params": { + "node_id": { + "type": "list", + "description": "A comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all nodes" + }, "actions": { "type": "list", "description": "A comma-separated list of actions that should be cancelled. Leave empty to cancel all." 
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.list.json b/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.list.json index f44fa92f853..7e8683b3475 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.list.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/tasks.list.json @@ -4,18 +4,18 @@ "methods": ["GET"], "url": { "path": "/_tasks", - "paths": ["/_tasks", "/_tasks/{node_id}", "/_tasks/{node_id}/{task_id}"], + "paths": ["/_tasks", "/_tasks/{task_id}"], "parts": { - "node_id": { - "type": "list", - "description": "A comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all nodes" - }, "task_id": { "type": "number", "description": "Return the task with specified id" } }, "params": { + "node_id": { + "type": "list", + "description": "A comma-separated list of node IDs or names to limit the returned information; use `_local` to return information from the node you're connecting to, leave empty to get information from all nodes" + }, "actions": { "type": "list", "description": "A comma-separated list of actions that should be returned. Leave empty to return all." 
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/tasks.cancel/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/tasks.cancel/10_basic.yaml index 6d8d7a9a205..d65ee04211b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/tasks.cancel/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/tasks.cancel/10_basic.yaml @@ -2,7 +2,6 @@ "tasks_cancel test": - do: tasks.cancel: - node_id: _local - task_id: 1 + actions: "unknown_action" - length: { nodes: 0 } diff --git a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java index 60cde5a5194..aa77c670a42 100644 --- a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java +++ b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java @@ -122,7 +122,7 @@ public class BootstrapForTesting { } // intellij hack: intellij test runner wants setIO and will // screw up all test logging without it! - if (System.getProperty("tests.maven") == null) { + if (System.getProperty("tests.gradle") == null) { perms.add(new RuntimePermission("setIO")); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java index 2347fc47672..6142edb9394 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java +++ b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java @@ -42,8 +42,8 @@ import static org.elasticsearch.test.rest.ESRestTestCase.REST_TESTS_SPEC; import static org.elasticsearch.test.rest.ESRestTestCase.REST_TESTS_SUITE; /** - * A {@link RunListener} that emits to {@link System#err} a string with command - * line parameters allowing quick test re-run under MVN command line. 
+ * A {@link RunListener} that emits a command you can use to re-run a failing test with the failing random seed to + * {@link System#err}. */ public class ReproduceInfoPrinter extends RunListener { @@ -60,7 +60,7 @@ public class ReproduceInfoPrinter extends RunListener { } /** - * true if we are running maven integration tests (mvn verify) + * Are we in the integ test phase? */ static boolean inVerifyPhase() { return Boolean.parseBoolean(System.getProperty("tests.verify.phase")); @@ -75,7 +75,7 @@ public class ReproduceInfoPrinter extends RunListener { final StringBuilder b = new StringBuilder("REPRODUCE WITH: gradle "); String task = System.getProperty("tests.task"); - // TODO: enforce (intellij still runs the runner?) or use default "test" but that wont' work for integ + // TODO: enforce (intellij still runs the runner?) or use default "test" but that won't work for integ b.append(task); GradleMessageBuilder gradleMessageBuilder = new GradleMessageBuilder(b); @@ -140,7 +140,8 @@ public class ReproduceInfoPrinter extends RunListener { appendProperties("es.logger.level"); if (inVerifyPhase()) { // these properties only make sense for integration tests - appendProperties("es.node.mode", "es.node.local", TESTS_CLUSTER, ESIntegTestCase.TESTS_ENABLE_MOCK_MODULES); + appendProperties("es.node.mode", "es.node.local", TESTS_CLUSTER, + ESIntegTestCase.TESTS_ENABLE_MOCK_MODULES); } appendProperties("tests.assertion.disabled", "tests.security.manager", "tests.nightly", "tests.jvms", "tests.client.ratio", "tests.heap.size", "tests.bwc", "tests.bwc.version"); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java index a29739c3982..e5597713570 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/client/RestClient.java @@ -34,8 +34,6 @@ import 
org.elasticsearch.common.Strings; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.logging.ESLogger; import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.network.InetAddresses; -import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.set.Sets; @@ -48,7 +46,6 @@ import javax.net.ssl.SSLContext; import java.io.Closeable; import java.io.IOException; import java.io.InputStream; -import java.net.InetSocketAddress; import java.net.URL; import java.nio.file.Files; import java.nio.file.Path; @@ -284,7 +281,7 @@ public class RestClient implements Closeable { SSLContext sslcontext = SSLContexts.custom() .loadTrustMaterial(keyStore, null) .build(); - sslsf = new SSLConnectionSocketFactory(sslcontext); + sslsf = new SSLConnectionSocketFactory(sslcontext, StrictHostnameVerifier.INSTANCE); } catch (KeyStoreException|NoSuchAlgorithmException|KeyManagementException|CertificateException e) { throw new RuntimeException(e); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/client/StrictHostnameVerifier.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/StrictHostnameVerifier.java new file mode 100644 index 00000000000..33a92ceb417 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/client/StrictHostnameVerifier.java @@ -0,0 +1,76 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.test.rest.client; + +import org.apache.http.conn.ssl.X509HostnameVerifier; +import org.apache.http.conn.util.InetAddressUtils; + +import javax.net.ssl.SSLException; +import javax.net.ssl.SSLSession; +import javax.net.ssl.SSLSocket; +import java.io.IOException; +import java.security.cert.X509Certificate; + +/** + * A custom {@link X509HostnameVerifier} implementation that wraps calls to the {@link org.apache.http.conn.ssl.StrictHostnameVerifier} and + * properly handles IPv6 addresses that come from a URL in the form http://[::1]:9200/ by removing the surrounding brackets. 
+ * + * This is a variation of the fix for HTTPCLIENT-1698, which is not + * released yet as of Apache HttpClient 4.5.1 + */ +final class StrictHostnameVerifier implements X509HostnameVerifier { + + static final StrictHostnameVerifier INSTANCE = new StrictHostnameVerifier(); + + // We need to wrap the default verifier for HttpClient since we use an older version and the following issue is not + // fixed in a released version yet https://issues.apache.org/jira/browse/HTTPCLIENT-1698 + // TL;DR we need to strip '[' and ']' from IPv6 addresses if they come from a URL + private final X509HostnameVerifier verifier = new org.apache.http.conn.ssl.StrictHostnameVerifier(); + + private StrictHostnameVerifier() {} + + @Override + public boolean verify(String host, SSLSession sslSession) { + return verifier.verify(stripBracketsIfNecessary(host), sslSession); + } + + @Override + public void verify(String host, SSLSocket ssl) throws IOException { + verifier.verify(stripBracketsIfNecessary(host), ssl); + } + + @Override + public void verify(String host, X509Certificate cert) throws SSLException { + verifier.verify(stripBracketsIfNecessary(host), cert); + } + + @Override + public void verify(String host, String[] cns, String[] subjectAlts) throws SSLException { + verifier.verify(stripBracketsIfNecessary(host), cns, subjectAlts); + } + + private String stripBracketsIfNecessary(String host) { + if (host.startsWith("[") && host.endsWith("]")) { + String newHost = host.substring(1, host.length() - 1); + assert InetAddressUtils.isIPv6Address(newHost); + return newHost; + } + return host; + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/client/StrictHostnameVerifierTests.java b/test/framework/src/main/java/org/elasticsearch/test/rest/client/StrictHostnameVerifierTests.java new file mode 100644 index 00000000000..7bbda67fbdb --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/client/StrictHostnameVerifierTests.java @@ -0,0 +1,120 @@ 
+/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.test.rest.client; + +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import javax.net.ssl.SSLSession; +import javax.net.ssl.SSLSocket; +import javax.security.auth.x500.X500Principal; +import java.security.cert.Certificate; +import java.security.cert.X509Certificate; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +/** + * Tests for the {@link StrictHostnameVerifier} to validate that it can verify IPv6 addresses with and without bracket notation, in + * addition to other address types. 
+ */ +public class StrictHostnameVerifierTests extends ESTestCase { + + private static final int IP_SAN_TYPE = 7; + private static final int DNS_SAN_TYPE = 2; + + private static final String[] CNS = new String[] { "my node" }; + private static final String[] IP_SANS = new String[] { "127.0.0.1", "192.168.1.1", "::1" }; + private static final String[] DNS_SANS = new String[] { "localhost", "computer", "localhost6" }; + + private SSLSocket sslSocket; + private SSLSession sslSession; + private X509Certificate certificate; + + @Before + public void setupMocks() throws Exception { + sslSocket = mock(SSLSocket.class); + sslSession = mock(SSLSession.class); + certificate = mock(X509Certificate.class); + Collection<List<?>> subjectAlternativeNames = new ArrayList<>(); + for (String san : IP_SANS) { + subjectAlternativeNames.add(Arrays.asList(IP_SAN_TYPE, san)); + } + for (String san : DNS_SANS) { + subjectAlternativeNames.add(Arrays.asList(DNS_SAN_TYPE, san)); + } + + when(sslSocket.getSession()).thenReturn(sslSession); + when(sslSession.getPeerCertificates()).thenReturn(new Certificate[] { certificate }); + when(certificate.getSubjectX500Principal()).thenReturn(new X500Principal("CN=" + CNS[0])); + when(certificate.getSubjectAlternativeNames()).thenReturn(subjectAlternativeNames); + } + + public void testThatIPv6WithBracketsWorks() throws Exception { + final String ipv6Host = "[::1]"; + + // an exception will be thrown if verification fails + StrictHostnameVerifier.INSTANCE.verify(ipv6Host, CNS, IP_SANS); + StrictHostnameVerifier.INSTANCE.verify(ipv6Host, sslSocket); + StrictHostnameVerifier.INSTANCE.verify(ipv6Host, certificate); + + // this is the only one we can assert on + assertTrue(StrictHostnameVerifier.INSTANCE.verify(ipv6Host, sslSession)); + } + + public void testThatIPV6WithoutBracketWorks() throws Exception { + final String ipv6Host = "::1"; + + // an exception will be thrown if verification fails + StrictHostnameVerifier.INSTANCE.verify(ipv6Host, CNS, IP_SANS); + 
StrictHostnameVerifier.INSTANCE.verify(ipv6Host, sslSocket); + StrictHostnameVerifier.INSTANCE.verify(ipv6Host, certificate); + + // this is the only one we can assert on + assertTrue(StrictHostnameVerifier.INSTANCE.verify(ipv6Host, sslSession)); + } + + public void testThatIPV4Works() throws Exception { + final String ipv4Host = randomFrom("127.0.0.1", "192.168.1.1"); + + // an exception will be thrown if verification fails + StrictHostnameVerifier.INSTANCE.verify(ipv4Host, CNS, IP_SANS); + StrictHostnameVerifier.INSTANCE.verify(ipv4Host, sslSocket); + StrictHostnameVerifier.INSTANCE.verify(ipv4Host, certificate); + + // this is the only one we can assert on + assertTrue(StrictHostnameVerifier.INSTANCE.verify(ipv4Host, sslSession)); + } + + public void testThatHostnameWorks() throws Exception { + final String host = randomFrom(DNS_SANS); + + // an exception will be thrown if verification fails + StrictHostnameVerifier.INSTANCE.verify(host, CNS, DNS_SANS); + StrictHostnameVerifier.INSTANCE.verify(host, sslSocket); + StrictHostnameVerifier.INSTANCE.verify(host, certificate); + + // this is the only one we can assert on + assertTrue(StrictHostnameVerifier.INSTANCE.verify(host, sslSession)); + } +}