Merge branch 'master' into dots2
commit 03754ade83
@@ -67,7 +67,6 @@ public class PluginBuildPlugin extends BuildPlugin {
provided "com.vividsolutions:jts:${project.versions.jts}"
provided "log4j:log4j:${project.versions.log4j}"
provided "log4j:apache-log4j-extras:${project.versions.log4j}"
provided "org.slf4j:slf4j-api:${project.versions.slf4j}"
provided "net.java.dev.jna:jna:${project.versions.jna}"
}
}

@@ -101,11 +100,6 @@ public class PluginBuildPlugin extends BuildPlugin {
from pluginMetadata // metadata (eg custom security policy)
from project.jar // this plugin's jar
from project.configurations.runtime - project.configurations.provided // the dep jars
// hack just for slf4j, in case it is "upgrade" from provided to compile,
// since it is not actually provided in distributions
from project.configurations.runtime.fileCollection { Dependency dep ->
return dep.name == 'slf4j-api' && project.configurations.compile.dependencies.contains(dep)
}
// extra files for the plugin to go into the zip
from('src/main/packaging') // TODO: move all config/bin/_size/etc into packaging
from('src/main') {
@@ -374,9 +374,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]io[/\\]Channels.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]io[/\\]stream[/\\]NamedWriteableRegistry.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]joda[/\\]Joda.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]logging[/\\]ESLoggerFactory.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]logging[/\\]Loggers.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]logging[/\\]log4j[/\\]LogConfigurator.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]lucene[/\\]Lucene.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]lucene[/\\]all[/\\]AllTermQuery.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]common[/\\]lucene[/\\]index[/\\]ElasticsearchDirectoryReader.java" checks="LineLength" />
@@ -4,7 +4,7 @@ lucene = 5.5.0
# optional dependencies
spatial4j = 0.5
jts = 1.13
jackson = 2.6.2
jackson = 2.7.1
log4j = 1.2.17
slf4j = 1.6.2
jna = 4.1.0
@@ -77,7 +77,6 @@ dependencies {
// logging
compile "log4j:log4j:${versions.log4j}", optional
compile "log4j:apache-log4j-extras:${versions.log4j}", optional
compile "org.slf4j:slf4j-api:${versions.slf4j}", optional

compile "net.java.dev.jna:jna:${versions.jna}", optional

@@ -224,8 +223,9 @@ thirdPartyAudit.excludes = [
'org.osgi.util.tracker.ServiceTracker',
'org.osgi.util.tracker.ServiceTrackerCustomizer',

'org.slf4j.impl.StaticMDCBinder',
'org.slf4j.impl.StaticMarkerBinder',
// from org.netty.util.internal.logging.InternalLoggerFactory (netty) - it's optional
'org.slf4j.Logger',
'org.slf4j.LoggerFactory',
]

// dependency license are currently checked in distribution
@@ -23,7 +23,7 @@ import org.elasticsearch.cluster.action.shard.ShardStateAction;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.logging.support.LoggerMessageFormat;
import org.elasticsearch.common.logging.LoggerMessageFormat;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.Index;
@@ -62,7 +62,7 @@ public class FieldStatsRequest extends BroadcastRequest<FieldStatsRequest> {

public void setIndexConstraints(IndexConstraint[] indexConstraints) {
if (indexConstraints == null) {
throw new NullPointerException("specified index_contraints can't be null");
throw new NullPointerException("specified index_constraints can't be null");
}
this.indexConstraints = indexConstraints;
}
@@ -52,7 +52,6 @@ public class TransportSearchAction extends HandledTransportAction<SearchRequest,
private final TransportSearchQueryThenFetchAction queryThenFetchAction;
private final TransportSearchDfsQueryAndFetchAction dfsQueryAndFetchAction;
private final TransportSearchQueryAndFetchAction queryAndFetchAction;
private final boolean optimizeSingleShard;

@Inject
public TransportSearchAction(Settings settings, ThreadPool threadPool,

@@ -68,27 +67,24 @@ public class TransportSearchAction extends HandledTransportAction<SearchRequest,
this.queryThenFetchAction = queryThenFetchAction;
this.dfsQueryAndFetchAction = dfsQueryAndFetchAction;
this.queryAndFetchAction = queryAndFetchAction;
this.optimizeSingleShard = this.settings.getAsBoolean("action.search.optimize_single_shard", true);
}

@Override
protected void doExecute(SearchRequest searchRequest, ActionListener<SearchResponse> listener) {
// optimize search type for cases where there is only one shard group to search on
if (optimizeSingleShard) {
try {
ClusterState clusterState = clusterService.state();
String[] concreteIndices = indexNameExpressionResolver.concreteIndices(clusterState, searchRequest);
Map<String, Set<String>> routingMap = indexNameExpressionResolver.resolveSearchRouting(clusterState, searchRequest.routing(), searchRequest.indices());
int shardCount = clusterService.operationRouting().searchShardsCount(clusterState, concreteIndices, routingMap);
if (shardCount == 1) {
// if we only have one group, then we always want Q_A_F, no need for DFS, and no need to do THEN since we hit one shard
searchRequest.searchType(QUERY_AND_FETCH);
}
} catch (IndexNotFoundException | IndexClosedException e) {
// ignore these failures, we will notify the search response if its really the case from the actual action
} catch (Exception e) {
logger.debug("failed to optimize search type, continue as normal", e);
try {
ClusterState clusterState = clusterService.state();
String[] concreteIndices = indexNameExpressionResolver.concreteIndices(clusterState, searchRequest);
Map<String, Set<String>> routingMap = indexNameExpressionResolver.resolveSearchRouting(clusterState, searchRequest.routing(), searchRequest.indices());
int shardCount = clusterService.operationRouting().searchShardsCount(clusterState, concreteIndices, routingMap);
if (shardCount == 1) {
// if we only have one group, then we always want Q_A_F, no need for DFS, and no need to do THEN since we hit one shard
searchRequest.searchType(QUERY_AND_FETCH);
}
} catch (IndexNotFoundException | IndexClosedException e) {
// ignore these failures, we will notify the search response if its really the case from the actual action
} catch (Exception e) {
logger.debug("failed to optimize search type, continue as normal", e);
}
if (searchRequest.searchType() == DFS_QUERY_THEN_FETCH) {
dfsQueryThenFetchAction.execute(searchRequest, listener);
@@ -35,7 +35,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.ShardIterator;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.logging.support.LoggerMessageFormat;
import org.elasticsearch.common.logging.LoggerMessageFormat;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.shard.ShardId;
@@ -35,7 +35,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardsIterator;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.logging.support.LoggerMessageFormat;
import org.elasticsearch.common.logging.LoggerMessageFormat;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.threadpool.ThreadPool;
@@ -29,10 +29,9 @@ import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.cli.CliTool;
import org.elasticsearch.common.cli.Terminal;
import org.elasticsearch.common.inject.CreationException;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.LogConfigurator;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.logging.log4j.LogConfigurator;
import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
@@ -366,7 +366,7 @@ public class IndexShardRoutingTable implements Iterable<ShardRouting> {
}
}
if (ordered.isEmpty()) {
throw new IllegalArgumentException("No data node with critera [" + nodeAttribute + "] found");
throw new IllegalArgumentException("No data node with criteria [" + nodeAttribute + "] found");
}
return new PlainShardIterator(shardId, ordered);
}
@@ -458,7 +458,7 @@ public class RoutingNodes implements Iterable<RoutingNode> {
*/
public void started(ShardRouting shard) {
ensureMutable();
assert !shard.active() : "expected an intializing shard " + shard;
assert !shard.active() : "expected an initializing shard " + shard;
if (shard.relocatingNodeId() == null) {
// if this is not a target shard for relocation, we need to update statistics
inactiveShardCount--;
@@ -715,7 +715,7 @@ public class BalancedShardsAllocator extends AbstractComponent implements Shards
}
}
if (logger.isTraceEnabled()) {
logger.trace("No eligable node found to assign shard [{}] decision [{}]", shard, decision.type());
logger.trace("No eligible node found to assign shard [{}] decision [{}]", shard, decision.type());
}
} else if (logger.isTraceEnabled()) {
logger.trace("No Node found to assign shard [{}]", shard);
@@ -69,24 +69,22 @@ import java.util.function.Supplier;
import static org.elasticsearch.ElasticsearchException.readException;
import static org.elasticsearch.ElasticsearchException.readStackTrace;

/**
* A stream from this node to another node. Technically, it can also be streamed to a byte array but that is mostly for testing.
*/
public abstract class StreamInput extends InputStream {

private final NamedWriteableRegistry namedWriteableRegistry;

private Version version = Version.CURRENT;

protected StreamInput() {
this.namedWriteableRegistry = new NamedWriteableRegistry();
}

protected StreamInput(NamedWriteableRegistry namedWriteableRegistry) {
this.namedWriteableRegistry = namedWriteableRegistry;
}

/**
* The version of the node on the other side of this stream.
*/
public Version getVersion() {
return this.version;
}

/**
* Set the version of the node on the other side of this stream.
*/
public void setVersion(Version version) {
this.version = version;
}
@@ -60,19 +60,24 @@ import java.util.List;
import java.util.Map;

/**
*
* A stream from another node to this node. Technically, it can also be streamed from a byte array but that is mostly for testing.
*/
public abstract class StreamOutput extends OutputStream {

private Version version = Version.CURRENT;

/**
* The version of the node on the other side of this stream.
*/
public Version getVersion() {
return this.version;
}

public StreamOutput setVersion(Version version) {
/**
* Set the version of the node on the other side of this stream.
*/
public void setVersion(Version version) {
this.version = version;
return this;
}

public long position() throws IOException {
@@ -22,11 +22,26 @@ package org.elasticsearch.common.io.stream;
import java.io.IOException;

/**
* Implementers can be written to a {@linkplain StreamOutput} and read from a {@linkplain StreamInput}. This allows them to be "thrown
* across the wire" using Elasticsearch's internal protocol. If the implementer also implements equals and hashCode then a copy made by
* serializing and deserializing must be equal and have the same hashCode. It isn't required that such a copy be entirely unchanged. For
* example, {@link org.elasticsearch.common.unit.TimeValue} converts the time to nanoseconds for serialization.
*
* Prefer implementing {@link Writeable} over implementing this interface where possible. Lots of code depends on this interface so this
* isn't always possible.
*
* Implementers of this interface almost always declare a no arg constructor that is exclusively used for creating "empty" objects on which
* you then call {@link #readFrom(StreamInput)}. Because {@linkplain #readFrom(StreamInput)} isn't part of the constructor the fields
* on implementers cannot be final. It is these reasons that this interface has fallen out of favor compared to {@linkplain Writeable}.
*/
public interface Streamable {

/**
* Set this object's fields from a {@linkplain StreamInput}.
*/
void readFrom(StreamInput in) throws IOException;

/**
* Write this object's fields to a {@linkplain StreamOutput}.
*/
void writeTo(StreamOutput out) throws IOException;
}
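A minimal sketch of the pattern this Javadoc describes, for orientation only; it is not part of the commit, and the GreetingMessage class plus the readString()/writeString() helpers are assumptions about the existing StreamInput/StreamOutput API.

import java.io.IOException;

// Hypothetical Streamable implementer: a no-arg constructor plus readFrom/writeTo,
// which is why its fields cannot be final.
public class GreetingMessage implements Streamable {
    private String greeting; // assigned later by readFrom(), so not final

    public GreetingMessage() {
        // "empty" object, only used as a target for readFrom(StreamInput)
    }

    public GreetingMessage(String greeting) {
        this.greeting = greeting;
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        greeting = in.readString(); // readString() assumed to exist on StreamInput
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(greeting); // writeString() assumed to exist on StreamOutput
    }
}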
@@ -20,11 +20,17 @@ package org.elasticsearch.common.io.stream;

import java.io.IOException;

/**
* Implementers can be read from {@linkplain StreamInput} by calling their {@link #readFrom(StreamInput)} method.
*
* It is common for implementers of this interface to declare a <code>public static final</code> instance of themselves named PROTOTYPE so
* users can call {@linkplain #readFrom(StreamInput)} on it. It is also fairly typical for readFrom to be implemented as a method that just
* calls a constructor that takes {@linkplain StreamInput} as a parameter. This allows the fields in the implementer to be
* <code>final</code>.
*/
public interface StreamableReader<T> {
/**
* Reads a copy of an object with the same type form the stream input
*
* The caller object remains unchanged.
* Reads an object of this type from the provided {@linkplain StreamInput}. The receiving instance remains unchanged.
*/
T readFrom(StreamInput in) throws IOException;
}
@@ -21,10 +21,20 @@ package org.elasticsearch.common.io.stream;

import java.io.IOException;

/**
* Implementers can be written to a {@linkplain StreamOutput} and read from a {@linkplain StreamInput}. This allows them to be "thrown
* across the wire" using Elasticsearch's internal protocol. If the implementer also implements equals and hashCode then a copy made by
* serializing and deserializing must be equal and have the same hashCode. It isn't required that such a copy be entirely unchanged. For
* example, {@link org.elasticsearch.common.unit.TimeValue} converts the time to nanoseconds for serialization.
* {@linkplain org.elasticsearch.common.unit.TimeValue} actually implements {@linkplain Streamable} not {@linkplain Writeable} but it has
* the same contract.
*
* Prefer implementing this interface over implementing {@link Streamable} where possible. Lots of code depends on {@linkplain Streamable}
* so this isn't always possible.
*/
public interface Writeable<T> extends StreamableReader<T> {

/**
* Writes the current object into the output stream out
* Write this into the {@linkplain StreamOutput}.
*/
void writeTo(StreamOutput out) throws IOException;
}
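For contrast with Streamable, a hedged sketch of the preferred pattern follows; it is not part of the commit, and the Point class plus the readInt()/writeInt() helpers are illustrative assumptions about the stream API.

import java.io.IOException;

// Hypothetical Writeable implementer: deserialization goes through a constructor,
// so the fields can stay final; PROTOTYPE is the conventional entry point for readFrom.
public class Point implements Writeable<Point> {
    public static final Point PROTOTYPE = new Point(0, 0);

    private final int x;
    private final int y;

    public Point(int x, int y) {
        this.x = x;
        this.y = y;
    }

    public Point(StreamInput in) throws IOException {
        this(in.readInt(), in.readInt()); // readInt() assumed to exist on StreamInput
    }

    @Override
    public Point readFrom(StreamInput in) throws IOException {
        return new Point(in); // the PROTOTYPE instance itself stays unchanged
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeInt(x); // writeInt() assumed to exist on StreamOutput
        out.writeInt(y);
    }
}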
@@ -17,24 +17,7 @@
* under the License.
*/

package org.elasticsearch.common.logging.jdk;

import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.ESLoggerFactory;

/**
*
* Classes for streaming objects from one Elasticsearch node to another over its binary internode protocol.
*/
public class JdkESLoggerFactory extends ESLoggerFactory {

@Override
protected ESLogger rootLogger() {
return getLogger("");
}

@Override
protected ESLogger newInstance(String prefix, String name) {
final java.util.logging.Logger logger = java.util.logging.Logger.getLogger(name);
return new JdkESLogger(prefix, logger);
}
}
package org.elasticsearch.common.io.stream;
@@ -17,13 +17,12 @@
* under the License.
*/

package org.elasticsearch.common.logging.log4j;
package org.elasticsearch.common.logging;

import org.apache.log4j.Layout;
import org.apache.log4j.WriterAppender;
import org.apache.log4j.helpers.LogLog;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.logging.Loggers;

import java.io.IOException;
import java.io.OutputStream;
@@ -19,104 +19,188 @@

package org.elasticsearch.common.logging;

import org.apache.log4j.Level;
import org.apache.log4j.Logger;

import static org.elasticsearch.common.logging.LoggerMessageFormat.format;

/**
* Contract for all elasticsearch loggers.
* Elasticsearch's logger wrapper.
*/
public interface ESLogger {
public class ESLogger {
private static final String FQCN = ESLogger.class.getName();

String getPrefix();
private final String prefix;
private final Logger logger;

String getName();
public ESLogger(String prefix, Logger logger) {
this.prefix = prefix;
this.logger = logger;
}

/**
* Allows to set the logger level
* If the new level is null, the logger will inherit its level
* from its nearest ancestor with a specific (non-null) level value.
* @param level the new level
* The prefix of the log.
*/
void setLevel(String level);
public String getPrefix() {
return this.prefix;
}

/**
* Returns the current logger level
* If the level is null, it means that the logger inherits its level
* from its nearest ancestor with a specific (non-null) level value.
* @return the logger level
* Fetch the underlying logger so we can look at it. Only exists for testing.
*/
String getLevel();
Logger getLogger() {
return logger;
}

/**
* Returns {@code true} if a TRACE level message is logged.
* Set the level of the logger. If the new level is null, the logger will inherit it's level from its nearest ancestor with a non-null
* level.
*/
boolean isTraceEnabled();
public void setLevel(String level) {
if (level == null) {
logger.setLevel(null);
} else if ("error".equalsIgnoreCase(level)) {
logger.setLevel(Level.ERROR);
} else if ("warn".equalsIgnoreCase(level)) {
logger.setLevel(Level.WARN);
} else if ("info".equalsIgnoreCase(level)) {
logger.setLevel(Level.INFO);
} else if ("debug".equalsIgnoreCase(level)) {
logger.setLevel(Level.DEBUG);
} else if ("trace".equalsIgnoreCase(level)) {
logger.setLevel(Level.TRACE);
}
}

/**
* Returns {@code true} if a DEBUG level message is logged.
* The level of this logger. If null then the logger is inheriting it's level from its nearest ancestor with a non-null level.
*/
boolean isDebugEnabled();
public String getLevel() {
if (logger.getLevel() == null) {
return null;
}
return logger.getLevel().toString();
}

/**
* Returns {@code true} if an INFO level message is logged.
* The name of this logger.
*/
boolean isInfoEnabled();
public String getName() {
return logger.getName();
}

/**
* Returns {@code true} if a WARN level message is logged.
* Returns {@code true} if a TRACE level message should be logged.
*/
boolean isWarnEnabled();
public boolean isTraceEnabled() {
return logger.isTraceEnabled();
}

/**
* Returns {@code true} if an ERROR level message is logged.
* Returns {@code true} if a DEBUG level message should be logged.
*/
boolean isErrorEnabled();
public boolean isDebugEnabled() {
return logger.isDebugEnabled();
}

/**
* Returns {@code true} if an INFO level message should be logged.
*/
public boolean isInfoEnabled() {
return logger.isInfoEnabled();
}

/**
* Returns {@code true} if a WARN level message should be logged.
*/
public boolean isWarnEnabled() {
return logger.isEnabledFor(Level.WARN);
}

/**
* Returns {@code true} if an ERROR level message should be logged.
*/
public boolean isErrorEnabled() {
return logger.isEnabledFor(Level.ERROR);
}

/**
* Logs a TRACE level message.
*/
public void trace(String msg, Object... params) {
trace(msg, null, params);
}

/**
* Logs a TRACE level message with an exception.
*/
public void trace(String msg, Throwable cause, Object... params) {
if (isTraceEnabled()) {
logger.log(FQCN, Level.TRACE, format(prefix, msg, params), cause);
}
}

/**
* Logs a DEBUG level message.
*/
void trace(String msg, Object... params);
public void debug(String msg, Object... params) {
debug(msg, null, params);
}

/**
* Logs a DEBUG level message.
* Logs a DEBUG level message with an exception.
*/
void trace(String msg, Throwable cause, Object... params);
public void debug(String msg, Throwable cause, Object... params) {
if (isDebugEnabled()) {
logger.log(FQCN, Level.DEBUG, format(prefix, msg, params), cause);
}
}

/**
* Logs a DEBUG level message.
* Logs a INFO level message.
*/
void debug(String msg, Object... params);
public void info(String msg, Object... params) {
info(msg, null, params);
}

/**
* Logs a DEBUG level message.
* Logs a INFO level message with an exception.
*/
void debug(String msg, Throwable cause, Object... params);

/**
* Logs an INFO level message.
*/
void info(String msg, Object... params);

/**
* Logs an INFO level message.
*/
void info(String msg, Throwable cause, Object... params);
public void info(String msg, Throwable cause, Object... params) {
if (isInfoEnabled()) {
logger.log(FQCN, Level.INFO, format(prefix, msg, params), cause);
}
}

/**
* Logs a WARN level message.
*/
void warn(String msg, Object... params);
public void warn(String msg, Object... params) {
warn(msg, null, params);
}

/**
* Logs a WARN level message.
* Logs a WARN level message with an exception.
*/
void warn(String msg, Throwable cause, Object... params);
public void warn(String msg, Throwable cause, Object... params) {
if (isWarnEnabled()) {
logger.log(FQCN, Level.WARN, format(prefix, msg, params), cause);
}
}

/**
* Logs an ERROR level message.
* Logs a ERROR level message.
*/
void error(String msg, Object... params);
public void error(String msg, Object... params) {
error(msg, null, params);
}

/**
* Logs an ERROR level message.
* Logs a ERROR level message with an exception.
*/
void error(String msg, Throwable cause, Object... params);

public void error(String msg, Throwable cause, Object... params) {
if (isErrorEnabled()) {
logger.log(FQCN, Level.ERROR, format(prefix, msg, params), cause);
}
}
}
@@ -19,62 +19,29 @@

package org.elasticsearch.common.logging;

import org.elasticsearch.common.logging.jdk.JdkESLoggerFactory;
import org.elasticsearch.common.logging.log4j.Log4jESLoggerFactory;
import org.elasticsearch.common.logging.slf4j.Slf4jESLoggerFactory;
import org.elasticsearch.common.settings.AbstractScopedSettings;
import org.apache.log4j.Logger;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;

import java.util.Locale;
import java.util.Map;
import java.util.function.Consumer;
import java.util.regex.Pattern;

/**
* Factory to get {@link ESLogger}s
*/
public abstract class ESLoggerFactory {

public static final Setting<LogLevel> LOG_DEFAULT_LEVEL_SETTING = new Setting<>("logger.level", LogLevel.INFO.name(), LogLevel::parse, false, Setting.Scope.CLUSTER);
public static final Setting<LogLevel> LOG_LEVEL_SETTING = Setting.dynamicKeySetting("logger.", LogLevel.INFO.name(), LogLevel::parse, true, Setting.Scope.CLUSTER);

private static volatile ESLoggerFactory defaultFactory = new JdkESLoggerFactory();

static {
try {
Class<?> loggerClazz = Class.forName("org.apache.log4j.Logger");
// below will throw a NoSuchMethod failure with using slf4j log4j bridge
loggerClazz.getMethod("setLevel", Class.forName("org.apache.log4j.Level"));
defaultFactory = new Log4jESLoggerFactory();
} catch (Throwable e) {
// no log4j
try {
Class.forName("org.slf4j.Logger");
defaultFactory = new Slf4jESLoggerFactory();
} catch (Throwable e1) {
// no slf4j
}
}
}

/**
* Changes the default factory.
*/
public static void setDefaultFactory(ESLoggerFactory defaultFactory) {
if (defaultFactory == null) {
throw new NullPointerException("defaultFactory");
}
ESLoggerFactory.defaultFactory = defaultFactory;
}

public static final Setting<LogLevel> LOG_DEFAULT_LEVEL_SETTING =
new Setting<>("logger.level", LogLevel.INFO.name(), LogLevel::parse, false, Setting.Scope.CLUSTER);
public static final Setting<LogLevel> LOG_LEVEL_SETTING =
Setting.dynamicKeySetting("logger.", LogLevel.INFO.name(), LogLevel::parse, true, Setting.Scope.CLUSTER);

public static ESLogger getLogger(String prefix, String name) {
return defaultFactory.newInstance(prefix == null ? null : prefix.intern(), name.intern());
prefix = prefix == null ? null : prefix.intern();
name = name.intern();
return new ESLogger(prefix, Logger.getLogger(name));
}

public static ESLogger getLogger(String name) {
return defaultFactory.newInstance(name.intern());
return getLogger(null, name);
}

public static DeprecationLogger getDeprecationLogger(String name) {

@@ -86,17 +53,13 @@ public abstract class ESLoggerFactory {
}

public static ESLogger getRootLogger() {
return defaultFactory.rootLogger();
return new ESLogger(null, Logger.getRootLogger());
}

public ESLogger newInstance(String name) {
return newInstance(null, name);
private ESLoggerFactory() {
// Utility class can't be built.
}

protected abstract ESLogger rootLogger();

protected abstract ESLogger newInstance(String prefix, String name);

public enum LogLevel {
WARN, TRACE, INFO, DEBUG, ERROR;
public static LogLevel parse(String level) {
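A rough usage sketch of the concrete ESLogger that ESLoggerFactory now returns directly; it is not part of the commit, and the logger name, prefix and messages are made up for illustration.

ESLogger logger = ESLoggerFactory.getLogger("[node-1] ", "org.elasticsearch.example");
logger.setLevel("debug"); // mapped onto org.apache.log4j.Level.DEBUG
if (logger.isDebugEnabled()) {
    logger.debug("resolved [{}] shards", 3); // rendered through LoggerMessageFormat.format
}
logger.warn("recoverable failure", new RuntimeException("boom"));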
@@ -17,7 +17,7 @@
* under the License.
*/

package org.elasticsearch.common.logging.log4j;
package org.elasticsearch.common.logging;

import org.apache.log4j.PropertyConfigurator;
import org.elasticsearch.ElasticsearchException;

@@ -39,13 +39,14 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;

import static java.util.Collections.unmodifiableMap;
import static org.elasticsearch.common.Strings.cleanPath;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;

/**
*
* Configures log4j with a special set of replacements.
*/
public class LogConfigurator {

@@ -54,10 +55,12 @@ public class LogConfigurator {
private static final Map<String, String> REPLACEMENTS;
static {
Map<String, String> replacements = new HashMap<>();
replacements.put("console", "org.elasticsearch.common.logging.log4j.ConsoleAppender");
// Appenders
replacements.put("async", "org.apache.log4j.AsyncAppender");
replacements.put("console", ConsoleAppender.class.getName());
replacements.put("dailyRollingFile", "org.apache.log4j.DailyRollingFileAppender");
replacements.put("externallyRolledFile", "org.apache.log4j.ExternallyRolledFileAppender");
replacements.put("extrasRollingFile", "org.apache.log4j.rolling.RollingFileAppender");
replacements.put("file", "org.apache.log4j.FileAppender");
replacements.put("jdbc", "org.apache.log4j.jdbc.JDBCAppender");
replacements.put("jms", "org.apache.log4j.net.JMSAppender");

@@ -65,17 +68,18 @@ public class LogConfigurator {
replacements.put("ntevent", "org.apache.log4j.nt.NTEventLogAppender");
replacements.put("null", "org.apache.log4j.NullAppender");
replacements.put("rollingFile", "org.apache.log4j.RollingFileAppender");
replacements.put("extrasRollingFile", "org.apache.log4j.rolling.RollingFileAppender");
replacements.put("smtp", "org.apache.log4j.net.SMTPAppender");
replacements.put("socket", "org.apache.log4j.net.SocketAppender");
replacements.put("socketHub", "org.apache.log4j.net.SocketHubAppender");
replacements.put("syslog", "org.apache.log4j.net.SyslogAppender");
replacements.put("telnet", "org.apache.log4j.net.TelnetAppender");
replacements.put("terminal", "org.elasticsearch.common.logging.log4j.TerminalAppender");
// policies
replacements.put("terminal", TerminalAppender.class.getName());

// Policies
replacements.put("timeBased", "org.apache.log4j.rolling.TimeBasedRollingPolicy");
replacements.put("sizeBased", "org.apache.log4j.rolling.SizeBasedTriggeringPolicy");
// layouts

// Layouts
replacements.put("simple", "org.apache.log4j.SimpleLayout");
replacements.put("html", "org.apache.log4j.HTMLLayout");
replacements.put("pattern", "org.apache.log4j.PatternLayout");

@@ -141,7 +145,8 @@ public class LogConfigurator {
static void resolveConfig(Environment env, final Settings.Builder settingsBuilder) {

try {
Files.walkFileTree(env.configFile(), EnumSet.of(FileVisitOption.FOLLOW_LINKS), Integer.MAX_VALUE, new SimpleFileVisitor<Path>() {
Set<FileVisitOption> options = EnumSet.of(FileVisitOption.FOLLOW_LINKS);
Files.walkFileTree(env.configFile(), options, Integer.MAX_VALUE, new SimpleFileVisitor<Path>() {
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
String fileName = file.getFileName().toString();
@@ -17,13 +17,13 @@
* under the License.
*/

package org.elasticsearch.common.logging.support;
package org.elasticsearch.common.logging;

import java.util.HashMap;
import java.util.Map;
import java.util.HashSet;
import java.util.Set;

/**
*
* Format string for Elasticsearch log messages.
*/
public class LoggerMessageFormat {

@@ -79,13 +79,13 @@ public class LoggerMessageFormat {
// itself escaped: "abc x:\\{}"
// we have to consume one backward slash
sbuf.append(messagePattern.substring(i, j - 1));
deeplyAppendParameter(sbuf, argArray[L], new HashMap());
deeplyAppendParameter(sbuf, argArray[L], new HashSet<Object[]>());
i = j + 2;
}
} else {
// normal case
sbuf.append(messagePattern.substring(i, j));
deeplyAppendParameter(sbuf, argArray[L], new HashMap());
deeplyAppendParameter(sbuf, argArray[L], new HashSet<Object[]>());
i = j + 2;
}
}

@@ -117,7 +117,7 @@ public class LoggerMessageFormat {
}
}

private static void deeplyAppendParameter(StringBuilder sbuf, Object o, Map seenMap) {
private static void deeplyAppendParameter(StringBuilder sbuf, Object o, Set<Object[]> seen) {
if (o == null) {
sbuf.append("null");
return;

@@ -144,7 +144,7 @@ public class LoggerMessageFormat {
} else if (o instanceof double[]) {
doubleArrayAppend(sbuf, (double[]) o);
} else {
objectArrayAppend(sbuf, (Object[]) o, seenMap);
objectArrayAppend(sbuf, (Object[]) o, seen);
}
}
}

@@ -159,18 +159,18 @@ public class LoggerMessageFormat {

}

private static void objectArrayAppend(StringBuilder sbuf, Object[] a, Map seenMap) {
private static void objectArrayAppend(StringBuilder sbuf, Object[] a, Set<Object[]> seen) {
sbuf.append('[');
if (!seenMap.containsKey(a)) {
seenMap.put(a, null);
if (!seen.contains(a)) {
seen.add(a);
final int len = a.length;
for (int i = 0; i < len; i++) {
deeplyAppendParameter(sbuf, a[i], seenMap);
deeplyAppendParameter(sbuf, a[i], seen);
if (i != len - 1)
sbuf.append(", ");
}
// allow repeats in siblings
seenMap.remove(a);
seen.remove(a);
} else {
sbuf.append("...");
}
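A short usage sketch of the formatting contract ESLogger relies on above; it is not part of the commit, and the example strings and expected output are assumptions rather than quotes from the test suite.

String msg = LoggerMessageFormat.format("[node-1] ", "took [{}] ms for [{}]", 42, "search");
// expected to yield something like "[node-1] took [42] ms for [search]"

Object[] cyclic = new Object[1];
cyclic[0] = cyclic; // self-referencing array
String safe = LoggerMessageFormat.format(null, "args: {}", (Object) cyclic);
// the HashSet of already-seen arrays above is what keeps this call from recursing forever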
@@ -35,8 +35,6 @@ import static org.elasticsearch.common.util.CollectionUtils.asArrayList;

/**
* A set of utilities around Logging.
*
*
*/
public class Loggers {

@@ -58,20 +56,24 @@ public class Loggers {
return consoleLoggingEnabled;
}

public static ESLogger getLogger(Class clazz, Settings settings, ShardId shardId, String... prefixes) {
public static ESLogger getLogger(Class<?> clazz, Settings settings, ShardId shardId, String... prefixes) {
return getLogger(clazz, settings, shardId.getIndex(), asArrayList(Integer.toString(shardId.id()), prefixes).toArray(new String[0]));
}

/** Just like {@link #getLogger(Class, org.elasticsearch.common.settings.Settings,ShardId,String...)} but String loggerName instead of Class. */
/**
* Just like {@link #getLogger(Class, org.elasticsearch.common.settings.Settings,ShardId,String...)} but String loggerName instead of
* Class.
*/
public static ESLogger getLogger(String loggerName, Settings settings, ShardId shardId, String... prefixes) {
return getLogger(loggerName, settings, asArrayList(shardId.getIndexName(), Integer.toString(shardId.id()), prefixes).toArray(new String[0]));
return getLogger(loggerName, settings,
asArrayList(shardId.getIndexName(), Integer.toString(shardId.id()), prefixes).toArray(new String[0]));
}

public static ESLogger getLogger(Class clazz, Settings settings, Index index, String... prefixes) {
public static ESLogger getLogger(Class<?> clazz, Settings settings, Index index, String... prefixes) {
return getLogger(clazz, settings, asArrayList(SPACE, index.getName(), prefixes).toArray(new String[0]));
}

public static ESLogger getLogger(Class clazz, Settings settings, String... prefixes) {
public static ESLogger getLogger(Class<?> clazz, Settings settings, String... prefixes) {
return getLogger(buildClassLoggerName(clazz), settings, prefixes);
}

@@ -117,11 +119,11 @@ public class Loggers {
return ESLoggerFactory.getLogger(getLoggerName(s));
}

public static ESLogger getLogger(Class clazz) {
public static ESLogger getLogger(Class<?> clazz) {
return ESLoggerFactory.getLogger(getLoggerName(buildClassLoggerName(clazz)));
}

public static ESLogger getLogger(Class clazz, String... prefixes) {
public static ESLogger getLogger(Class<?> clazz, String... prefixes) {
return getLogger(buildClassLoggerName(clazz), prefixes);
}

@@ -146,7 +148,7 @@ public class Loggers {
return ESLoggerFactory.getLogger(prefix, getLoggerName(name));
}

private static String buildClassLoggerName(Class clazz) {
private static String buildClassLoggerName(Class<?> clazz) {
String name = clazz.getName();
if (name.startsWith("org.elasticsearch.")) {
name = Classes.getPackageName(clazz);
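A hypothetical call site for the Loggers helpers touched above, for orientation only; it is not part of the commit, the logger name is made up, and Settings and ShardId are supplied by the caller.

static ESLogger shardScopedLogger(Settings settings, ShardId shardId) {
    // yields a logger whose messages carry an "[index][shard]" style prefix
    return Loggers.getLogger("org.elasticsearch.example", settings, shardId);
}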
@@ -18,7 +18,7 @@
*/


package org.elasticsearch.common.logging.log4j;
package org.elasticsearch.common.logging;

import org.apache.log4j.AppenderSkeleton;
import org.apache.log4j.spi.LoggingEvent;
@@ -1,108 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.common.logging.jdk;

import org.elasticsearch.common.logging.support.AbstractESLogger;

import java.util.logging.Level;
import java.util.logging.LogRecord;

/**
* A {@link LogRecord} which is used in conjunction with {@link JdkESLogger}
* with the ability to provide the class name, method name and line number
* information of the code calling the logger
*/
public class ESLogRecord extends LogRecord {
private static final String FQCN = AbstractESLogger.class.getName();
private String sourceClassName;
private String sourceMethodName;
private transient boolean needToInferCaller;

public ESLogRecord(Level level, String msg) {
super(level, msg);
needToInferCaller = true;
}

@Override
public String getSourceClassName() {
if (needToInferCaller) {
inferCaller();
}
return sourceClassName;
}

@Override
public void setSourceClassName(String sourceClassName) {
this.sourceClassName = sourceClassName;
needToInferCaller = false;
}

@Override
public String getSourceMethodName() {
if (needToInferCaller) {
inferCaller();
}
return sourceMethodName;
}

@Override
public void setSourceMethodName(String sourceMethodName) {
this.sourceMethodName = sourceMethodName;
needToInferCaller = false;
}

/**
* Determines the source information for the caller of the logger (class
* name, method name, and line number)
*/
private void inferCaller() {
needToInferCaller = false;
Throwable throwable = new Throwable();

boolean lookingForLogger = true;
for (final StackTraceElement frame : throwable.getStackTrace()) {
String cname = frame.getClassName();
boolean isLoggerImpl = isLoggerImplFrame(cname);
if (lookingForLogger) {
// Skip all frames until we have found the first logger frame.
if (isLoggerImpl) {
lookingForLogger = false;
}
} else {
if (!isLoggerImpl) {
// skip reflection call
if (!cname.startsWith("java.lang.reflect.") && !cname.startsWith("sun.reflect.")) {
// We've found the relevant frame.
setSourceClassName(cname);
setSourceMethodName(frame.getMethodName());
return;
}
}
}
}
// We haven't found a suitable frame, so just punt. This is
// OK as we are only committed to making a "best effort" here.
}

private boolean isLoggerImplFrame(String cname) {
// the log record could be created for a platform logger
return cname.equals(FQCN);
}
}
@@ -1,163 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.common.logging.jdk;

import org.elasticsearch.common.logging.support.AbstractESLogger;

import java.util.logging.Level;
import java.util.logging.LogRecord;
import java.util.logging.Logger;

/**
*
*/
public class JdkESLogger extends AbstractESLogger {

private final Logger logger;

public JdkESLogger(String prefix, Logger logger) {
super(prefix);
this.logger = logger;
}

@Override
public void setLevel(String level) {
if (level == null) {
logger.setLevel(null);
} else if ("error".equalsIgnoreCase(level)) {
logger.setLevel(Level.SEVERE);
} else if ("warn".equalsIgnoreCase(level)) {
logger.setLevel(Level.WARNING);
} else if ("info".equalsIgnoreCase(level)) {
logger.setLevel(Level.INFO);
} else if ("debug".equalsIgnoreCase(level)) {
logger.setLevel(Level.FINE);
} else if ("trace".equalsIgnoreCase(level)) {
logger.setLevel(Level.FINEST);
}
}

@Override
public String getLevel() {
if (logger.getLevel() == null) {
return null;
}
return logger.getLevel().toString();
}

@Override
public String getName() {
return logger.getName();
}

@Override
public boolean isTraceEnabled() {
return logger.isLoggable(Level.FINEST);
}

@Override
public boolean isDebugEnabled() {
return logger.isLoggable(Level.FINE);
}

@Override
public boolean isInfoEnabled() {
return logger.isLoggable(Level.INFO);
}

@Override
public boolean isWarnEnabled() {
return logger.isLoggable(Level.WARNING);
}

@Override
public boolean isErrorEnabled() {
return logger.isLoggable(Level.SEVERE);
}

@Override
protected void internalTrace(String msg) {
LogRecord record = new ESLogRecord(Level.FINEST, msg);
logger.log(record);
}

@Override
protected void internalTrace(String msg, Throwable cause) {
LogRecord record = new ESLogRecord(Level.FINEST, msg);
record.setThrown(cause);
logger.log(record);
}

@Override
protected void internalDebug(String msg) {
LogRecord record = new ESLogRecord(Level.FINE, msg);
logger.log(record);
}

@Override
protected void internalDebug(String msg, Throwable cause) {
LogRecord record = new ESLogRecord(Level.FINE, msg);
record.setThrown(cause);
logger.log(record);
}

@Override
protected void internalInfo(String msg) {
LogRecord record = new ESLogRecord(Level.INFO, msg);
logger.log(record);
}

@Override
protected void internalInfo(String msg, Throwable cause) {
LogRecord record = new ESLogRecord(Level.INFO, msg);
record.setThrown(cause);
logger.log(record);
}

@Override
protected void internalWarn(String msg) {
LogRecord record = new ESLogRecord(Level.WARNING, msg);
logger.log(record);
}

@Override
protected void internalWarn(String msg, Throwable cause) {
LogRecord record = new ESLogRecord(Level.WARNING, msg);
record.setThrown(cause);
logger.log(record);
}

@Override
protected void internalError(String msg) {
LogRecord record = new ESLogRecord(Level.SEVERE, msg);
logger.log(record);
}

@Override
protected void internalError(String msg, Throwable cause) {
LogRecord record = new ESLogRecord(Level.SEVERE, msg);
record.setThrown(cause);
logger.log(record);
}

protected Logger logger() {
return logger;
}
}
@@ -1,147 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.common.logging.log4j;

import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.elasticsearch.common.logging.support.AbstractESLogger;

/**
*
*/
public class Log4jESLogger extends AbstractESLogger {

private final org.apache.log4j.Logger logger;
private final String FQCN = AbstractESLogger.class.getName();

public Log4jESLogger(String prefix, Logger logger) {
super(prefix);
this.logger = logger;
}

public Logger logger() {
return logger;
}

@Override
public void setLevel(String level) {
if (level == null) {
logger.setLevel(null);
} else if ("error".equalsIgnoreCase(level)) {
logger.setLevel(Level.ERROR);
} else if ("warn".equalsIgnoreCase(level)) {
logger.setLevel(Level.WARN);
} else if ("info".equalsIgnoreCase(level)) {
logger.setLevel(Level.INFO);
} else if ("debug".equalsIgnoreCase(level)) {
logger.setLevel(Level.DEBUG);
} else if ("trace".equalsIgnoreCase(level)) {
logger.setLevel(Level.TRACE);
}
}

@Override
public String getLevel() {
if (logger.getLevel() == null) {
return null;
}
return logger.getLevel().toString();
}

@Override
public String getName() {
return logger.getName();
}

@Override
public boolean isTraceEnabled() {
return logger.isTraceEnabled();
}

@Override
public boolean isDebugEnabled() {
return logger.isDebugEnabled();
}

@Override
public boolean isInfoEnabled() {
return logger.isInfoEnabled();
}

@Override
public boolean isWarnEnabled() {
return logger.isEnabledFor(Level.WARN);
}

@Override
public boolean isErrorEnabled() {
return logger.isEnabledFor(Level.ERROR);
}

@Override
protected void internalTrace(String msg) {
logger.log(FQCN, Level.TRACE, msg, null);
}

@Override
protected void internalTrace(String msg, Throwable cause) {
logger.log(FQCN, Level.TRACE, msg, cause);
}

@Override
protected void internalDebug(String msg) {
logger.log(FQCN, Level.DEBUG, msg, null);
}

@Override
protected void internalDebug(String msg, Throwable cause) {
logger.log(FQCN, Level.DEBUG, msg, cause);
}

@Override
protected void internalInfo(String msg) {
logger.log(FQCN, Level.INFO, msg, null);
}

@Override
protected void internalInfo(String msg, Throwable cause) {
logger.log(FQCN, Level.INFO, msg, cause);
}

@Override
protected void internalWarn(String msg) {
logger.log(FQCN, Level.WARN, msg, null);
}

@Override
protected void internalWarn(String msg, Throwable cause) {
logger.log(FQCN, Level.WARN, msg, cause);
}

@Override
protected void internalError(String msg) {
logger.log(FQCN, Level.ERROR, msg, null);
}

@Override
protected void internalError(String msg, Throwable cause) {
logger.log(FQCN, Level.ERROR, msg, cause);
}
}
@@ -1,41 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.common.logging.log4j;

import org.apache.log4j.Logger;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.ESLoggerFactory;

/**
*
*/
public class Log4jESLoggerFactory extends ESLoggerFactory {

@Override
protected ESLogger rootLogger() {
return new Log4jESLogger(null, Logger.getRootLogger());
}

@Override
protected ESLogger newInstance(String prefix, String name) {
final org.apache.log4j.Logger logger = org.apache.log4j.Logger.getLogger(name);
return new Log4jESLogger(prefix, logger);
}
}
@@ -1,179 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.common.logging.slf4j;

import org.elasticsearch.common.logging.support.AbstractESLogger;
import org.slf4j.Logger;
import org.slf4j.spi.LocationAwareLogger;

/**
*
*/
public class Slf4jESLogger extends AbstractESLogger {

private final Logger logger;
private final LocationAwareLogger lALogger;
private final String FQCN = AbstractESLogger.class.getName();

public Slf4jESLogger(String prefix, Logger logger) {
super(prefix);
this.logger = logger;
if (logger instanceof LocationAwareLogger) {
lALogger = (LocationAwareLogger) logger;
} else {
lALogger = null;
}
}

@Override
public void setLevel(String level) {
// can't set it in slf4j...
}

@Override
public String getLevel() {
// can't get it in slf4j...
return null;
}

@Override
public String getName() {
return logger.getName();
}

@Override
public boolean isTraceEnabled() {
return logger.isTraceEnabled();
}

@Override
public boolean isDebugEnabled() {
return logger.isDebugEnabled();
}

@Override
public boolean isInfoEnabled() {
return logger.isInfoEnabled();
}

@Override
public boolean isWarnEnabled() {
return logger.isWarnEnabled();
}

@Override
public boolean isErrorEnabled() {
return logger.isErrorEnabled();
}

@Override
protected void internalTrace(String msg) {
if (lALogger != null) {
lALogger.log(null, FQCN, LocationAwareLogger.TRACE_INT, msg, null, null);
} else {
logger.trace(msg);
}
}

@Override
protected void internalTrace(String msg, Throwable cause) {
if (lALogger != null) {
lALogger.log(null, FQCN, LocationAwareLogger.TRACE_INT, msg, null, cause);
} else {
logger.trace(msg);
}
}

@Override
protected void internalDebug(String msg) {
if (lALogger != null) {
lALogger.log(null, FQCN, LocationAwareLogger.DEBUG_INT, msg, null, null);
} else {
logger.debug(msg);
}
}

@Override
protected void internalDebug(String msg, Throwable cause) {
if (lALogger != null) {
lALogger.log(null, FQCN, LocationAwareLogger.DEBUG_INT, msg, null, cause);
} else {
logger.debug(msg);
}
}

@Override
protected void internalInfo(String msg) {
if (lALogger != null) {
lALogger.log(null, FQCN, LocationAwareLogger.INFO_INT, msg, null, null);
} else {
logger.info(msg);
}
}

@Override
protected void internalInfo(String msg, Throwable cause) {
if (lALogger != null) {
lALogger.log(null, FQCN, LocationAwareLogger.INFO_INT, msg, null, cause);
} else {
logger.info(msg, cause);
}
}

@Override
protected void internalWarn(String msg) {
if (lALogger != null) {
lALogger.log(null, FQCN, LocationAwareLogger.WARN_INT, msg, null, null);
} else {
logger.warn(msg);
}
}

@Override
protected void internalWarn(String msg, Throwable cause) {
if (lALogger != null) {
lALogger.log(null, FQCN, LocationAwareLogger.WARN_INT, msg, null, cause);
} else {
logger.warn(msg);
}
}

@Override
protected void internalError(String msg) {
if (lALogger != null) {
lALogger.log(null, FQCN, LocationAwareLogger.ERROR_INT, msg, null, null);
} else {
logger.error(msg);
}
}

@Override
protected void internalError(String msg, Throwable cause) {
if (lALogger != null) {
lALogger.log(null, FQCN, LocationAwareLogger.ERROR_INT, msg, null, cause);
} else {
logger.error(msg);
}
}

protected Logger logger() {
return logger;
}
}
@ -1,41 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.logging.slf4j;

import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 *
 */
public class Slf4jESLoggerFactory extends ESLoggerFactory {

    @Override
    protected ESLogger rootLogger() {
        return getLogger(Logger.ROOT_LOGGER_NAME);
    }

    @Override
    protected ESLogger newInstance(String prefix, String name) {
        return new Slf4jESLogger(prefix, LoggerFactory.getLogger(name));
    }
}
@ -1,133 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.logging.support;

import org.elasticsearch.common.logging.ESLogger;

/**
 *
 */
public abstract class AbstractESLogger implements ESLogger {

    private final String prefix;

    protected AbstractESLogger(String prefix) {
        this.prefix = prefix;
    }

    @Override
    public String getPrefix() {
        return this.prefix;
    }

    @Override
    public void trace(String msg, Object... params) {
        if (isTraceEnabled()) {
            internalTrace(LoggerMessageFormat.format(prefix, msg, params));
        }
    }

    protected abstract void internalTrace(String msg);

    @Override
    public void trace(String msg, Throwable cause, Object... params) {
        if (isTraceEnabled()) {
            internalTrace(LoggerMessageFormat.format(prefix, msg, params), cause);
        }
    }

    protected abstract void internalTrace(String msg, Throwable cause);

    @Override
    public void debug(String msg, Object... params) {
        if (isDebugEnabled()) {
            internalDebug(LoggerMessageFormat.format(prefix, msg, params));
        }
    }

    protected abstract void internalDebug(String msg);

    @Override
    public void debug(String msg, Throwable cause, Object... params) {
        if (isDebugEnabled()) {
            internalDebug(LoggerMessageFormat.format(prefix, msg, params), cause);
        }
    }

    protected abstract void internalDebug(String msg, Throwable cause);

    @Override
    public void info(String msg, Object... params) {
        if (isInfoEnabled()) {
            internalInfo(LoggerMessageFormat.format(prefix, msg, params));
        }
    }

    protected abstract void internalInfo(String msg);

    @Override
    public void info(String msg, Throwable cause, Object... params) {
        if (isInfoEnabled()) {
            internalInfo(LoggerMessageFormat.format(prefix, msg, params), cause);
        }
    }

    protected abstract void internalInfo(String msg, Throwable cause);

    @Override
    public void warn(String msg, Object... params) {
        if (isWarnEnabled()) {
            internalWarn(LoggerMessageFormat.format(prefix, msg, params));
        }
    }

    protected abstract void internalWarn(String msg);

    @Override
    public void warn(String msg, Throwable cause, Object... params) {
        if (isWarnEnabled()) {
            internalWarn(LoggerMessageFormat.format(prefix, msg, params), cause);
        }
    }

    protected abstract void internalWarn(String msg, Throwable cause);

    @Override
    public void error(String msg, Object... params) {
        if (isErrorEnabled()) {
            internalError(LoggerMessageFormat.format(prefix, msg, params));
        }
    }

    protected abstract void internalError(String msg);

    @Override
    public void error(String msg, Throwable cause, Object... params) {
        if (isErrorEnabled()) {
            internalError(LoggerMessageFormat.format(prefix, msg, params), cause);
        }
    }

    protected abstract void internalError(String msg, Throwable cause);
}
@ -22,7 +22,6 @@ package org.elasticsearch.common.settings;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.util.set.Sets;

import java.util.ArrayList;
import java.util.Collections;

@ -63,6 +62,11 @@ public abstract class AbstractScopedSettings extends AbstractComponent {
throw new IllegalArgumentException("illegal settings key: [" + setting.getKey() + "]");
}
if (setting.hasComplexMatcher()) {
Setting<?> overlappingSetting = findOverlappingSetting(setting, complexMatchers);
if (overlappingSetting != null) {
throw new IllegalArgumentException("complex setting key: [" + setting.getKey() + "] overlaps existing setting key: [" +
overlappingSetting.getKey() + "]");
}
complexMatchers.putIfAbsent(setting.getKey(), setting);
} else {
keySettings.putIfAbsent(setting.getKey(), setting);

@ -410,4 +414,19 @@ public abstract class AbstractScopedSettings extends AbstractComponent {
return changed;
}

private static Setting<?> findOverlappingSetting(Setting<?> newSetting, Map<String, Setting<?>> complexMatchers) {
assert newSetting.hasComplexMatcher();
if (complexMatchers.containsKey(newSetting.getKey())) {
// we return null here because we use a putIfAbsent call when inserting into the map, so if it exists then we already checked
// the setting to make sure there are no overlapping settings.
return null;
}

for (Setting<?> existingSetting : complexMatchers.values()) {
if (newSetting.match(existingSetting.getKey()) || existingSetting.match(newSetting.getKey())) {
return existingSetting;
}
}
return null;
}
}
@ -154,7 +154,7 @@ public final class IndexModule {
*/
public void addIndexStore(String type, BiFunction<IndexSettings, IndexStoreConfig, IndexStore> provider) {
if (storeTypes.containsKey(type)) {
throw new IllegalArgumentException("key [" + type +"] already registerd");
throw new IllegalArgumentException("key [" + type +"] already registered");
}
storeTypes.put(type, provider);
}

@ -45,7 +45,7 @@ public class CommonGramsTokenFilterFactory extends AbstractTokenFilterFactory {
this.words = Analysis.parseCommonWords(env, settings, null, ignoreCase);

if (this.words == null) {
throw new IllegalArgumentException("mising or empty [common_words] or [common_words_path] configuration for common_grams token filter");
throw new IllegalArgumentException("missing or empty [common_words] or [common_words_path] configuration for common_grams token filter");
}
}

@ -36,7 +36,7 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.elasticsearch.common.logging.support.LoggerMessageFormat;
import org.elasticsearch.common.logging.LoggerMessageFormat;
import org.elasticsearch.index.mapper.ParseContext;

import java.io.IOException;

@ -178,7 +178,7 @@ public class MultiMatchQueryBuilder extends AbstractQueryBuilder<MultiMatchQuery
throw new IllegalArgumentException("[" + NAME + "] requires query value");
}
if (fields == null) {
throw new IllegalArgumentException("[" + NAME + "] requires fields at initalization time");
throw new IllegalArgumentException("[" + NAME + "] requires fields at initialization time");
}
this.value = value;
this.fieldsBoosts = new TreeMap<>();

@ -38,7 +38,7 @@ public abstract class BaseTranslogReader implements Comparable<BaseTranslogReade
protected final long firstOperationOffset;

public BaseTranslogReader(long generation, FileChannel channel, Path path, long firstOperationOffset) {
assert Translog.parseIdFromFileName(path) == generation : "generation missmatch. Path: " + Translog.parseIdFromFileName(path) + " but generation: " + generation;
assert Translog.parseIdFromFileName(path) == generation : "generation mismatch. Path: " + Translog.parseIdFromFileName(path) + " but generation: " + generation;

this.generation = generation;
this.path = path;

@ -24,7 +24,7 @@ import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.cli.CliTool;
import org.elasticsearch.common.cli.CliToolConfig;
import org.elasticsearch.common.cli.Terminal;
import org.elasticsearch.common.logging.log4j.LogConfigurator;
import org.elasticsearch.common.logging.LogConfigurator;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.node.internal.InternalSettingsPreparer;

@ -102,7 +102,7 @@ public class RestAnalyzeAction extends BaseRestHandler {
public static void buildFromContent(BytesReference content, AnalyzeRequest analyzeRequest, ParseFieldMatcher parseFieldMatcher) {
try (XContentParser parser = XContentHelper.createParser(content)) {
if (parser.nextToken() != XContentParser.Token.START_OBJECT) {
throw new IllegalArgumentException("Malforrmed content, must start with an object");
throw new IllegalArgumentException("Malformed content, must start with an object");
} else {
XContentParser.Token token;
String currentFieldName = null;

@ -87,7 +87,7 @@ public class RestSearchScrollAction extends BaseRestHandler {
public static void buildFromContent(BytesReference content, SearchScrollRequest searchScrollRequest) {
try (XContentParser parser = XContentHelper.createParser(content)) {
if (parser.nextToken() != XContentParser.Token.START_OBJECT) {
throw new IllegalArgumentException("Malforrmed content, must start with an object");
throw new IllegalArgumentException("Malformed content, must start with an object");
} else {
XContentParser.Token token;
String currentFieldName = null;

@ -26,7 +26,7 @@ import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.logging.support.LoggerMessageFormat;
import org.elasticsearch.common.logging.LoggerMessageFormat;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;

@ -199,7 +199,7 @@ public class AggregatorFactories {
List<PipelineAggregatorBuilder<?>> orderedPipelineAggregators, List<PipelineAggregatorBuilder<?>> unmarkedBuilders,
Set<PipelineAggregatorBuilder<?>> temporarilyMarked, PipelineAggregatorBuilder<?> builder) {
if (temporarilyMarked.contains(builder)) {
throw new IllegalArgumentException("Cyclical dependancy found with pipeline aggregator [" + builder.getName() + "]");
throw new IllegalArgumentException("Cyclical dependency found with pipeline aggregator [" + builder.getName() + "]");
} else if (unmarkedBuilders.contains(builder)) {
temporarilyMarked.add(builder);
String[] bucketsPaths = builder.getBucketsPaths();

@ -228,7 +228,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
metaData);
}

throw new AggregationExecutionException("sigfnificant_terms aggregation cannot be applied to field ["
throw new AggregationExecutionException("significant_terms aggregation cannot be applied to field ["
+ config.fieldContext().field() + "]. It can only be applied to numeric or string fields.");
}
@ -83,10 +83,8 @@ public class SearchPhaseController extends AbstractComponent {
};

public static final ScoreDoc[] EMPTY_DOCS = new ScoreDoc[0];
public static final String SEARCH_CONTROLLER_OPTIMIZE_SINGLE_SHARD_KEY = "search.controller.optimize_single_shard";

private final BigArrays bigArrays;
private final boolean optimizeSingleShard;

private ScriptService scriptService;

@ -95,11 +93,6 @@ public class SearchPhaseController extends AbstractComponent {
super(settings);
this.bigArrays = bigArrays;
this.scriptService = scriptService;
this.optimizeSingleShard = settings.getAsBoolean(SEARCH_CONTROLLER_OPTIMIZE_SINGLE_SHARD_KEY, true);
}

public boolean optimizeSingleShard() {
return optimizeSingleShard;
}

public AggregatedDfs aggregateDfs(AtomicArray<DfsSearchResult> results) {

@ -168,50 +161,48 @@ public class SearchPhaseController extends AbstractComponent {
return EMPTY_DOCS;
}

if (optimizeSingleShard) {
boolean canOptimize = false;
QuerySearchResult result = null;
int shardIndex = -1;
if (results.size() == 1) {
canOptimize = true;
result = results.get(0).value.queryResult();
shardIndex = results.get(0).index;
} else {
// lets see if we only got hits from a single shard, if so, we can optimize...
for (AtomicArray.Entry<? extends QuerySearchResultProvider> entry : results) {
if (entry.value.queryResult().topDocs().scoreDocs.length > 0) {
if (result != null) { // we already have one, can't really optimize
canOptimize = false;
break;
}
canOptimize = true;
result = entry.value.queryResult();
shardIndex = entry.index;
boolean canOptimize = false;
QuerySearchResult result = null;
int shardIndex = -1;
if (results.size() == 1) {
canOptimize = true;
result = results.get(0).value.queryResult();
shardIndex = results.get(0).index;
} else {
// lets see if we only got hits from a single shard, if so, we can optimize...
for (AtomicArray.Entry<? extends QuerySearchResultProvider> entry : results) {
if (entry.value.queryResult().topDocs().scoreDocs.length > 0) {
if (result != null) { // we already have one, can't really optimize
canOptimize = false;
break;
}
canOptimize = true;
result = entry.value.queryResult();
shardIndex = entry.index;
}
}
if (canOptimize) {
int offset = result.from();
if (ignoreFrom) {
offset = 0;
}
ScoreDoc[] scoreDocs = result.topDocs().scoreDocs;
if (scoreDocs.length == 0 || scoreDocs.length < offset) {
return EMPTY_DOCS;
}
}
if (canOptimize) {
int offset = result.from();
if (ignoreFrom) {
offset = 0;
}
ScoreDoc[] scoreDocs = result.topDocs().scoreDocs;
if (scoreDocs.length == 0 || scoreDocs.length < offset) {
return EMPTY_DOCS;
}

int resultDocsSize = result.size();
if ((scoreDocs.length - offset) < resultDocsSize) {
resultDocsSize = scoreDocs.length - offset;
}
ScoreDoc[] docs = new ScoreDoc[resultDocsSize];
for (int i = 0; i < resultDocsSize; i++) {
ScoreDoc scoreDoc = scoreDocs[offset + i];
scoreDoc.shardIndex = shardIndex;
docs[i] = scoreDoc;
}
return docs;
int resultDocsSize = result.size();
if ((scoreDocs.length - offset) < resultDocsSize) {
resultDocsSize = scoreDocs.length - offset;
}
ScoreDoc[] docs = new ScoreDoc[resultDocsSize];
for (int i = 0; i < resultDocsSize; i++) {
ScoreDoc scoreDoc = scoreDocs[offset + i];
scoreDoc.shardIndex = shardIndex;
docs[i] = scoreDoc;
}
return docs;
}

@SuppressWarnings("unchecked")
@ -150,9 +150,9 @@ public class BulkRequestTests extends ESTestCase {
BulkRequest bulkRequest = new BulkRequest();
try {
bulkRequest.add(bulkAction.getBytes(StandardCharsets.UTF_8), 0, bulkAction.length(), null, null);
fail("should have thrown an exception about the unknown paramater _foo");
fail("should have thrown an exception about the unknown parameter _foo");
} catch (IllegalArgumentException e) {
assertThat("message contains error about the unknown paramater _foo: " + e.getMessage(),
assertThat("message contains error about the unknown parameter _foo: " + e.getMessage(),
e.getMessage().contains("Action/metadata line [3] contains an unknown parameter [_foo]"), equalTo(true));
}
}

@ -674,7 +674,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase {
} else {
try {
indexNameExpressionResolver.concreteIndices(context, "Foo*");
fail("expecting exeption when result empty and allowNoIndicec=false");
fail("expecting exception when result empty and allowNoIndicec=false");
} catch (IndexNotFoundException e) {
// expected exception
}

@ -713,7 +713,7 @@ public class ClusterRebalanceRoutingTests extends ESAllocationTestCase {
assertThat(routingTable.index("test1").shard(i).primaryShard().state(), equalTo(INITIALIZING));
}

logger.debug("now start initializing shards and expect exactly one rebalance from node1 to node 2 sicne index [test] is all on node1");
logger.debug("now start initializing shards and expect exactly one rebalance from node1 to node 2 since index [test] is all on node1");

routingNodes = clusterState.getRoutingNodes();
routingTable = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState("test1", INITIALIZING)).routingTable();

@ -181,7 +181,7 @@ public class RebalanceAfterActiveTests extends ESAllocationTestCase {
}

logger.info("complete relocation, thats it!");
logger.info("complete relocation, that's it!");
routingNodes = clusterState.getRoutingNodes();
prevRoutingTable = routingTable;
routingTable = strategy.applyStartedShards(clusterState, routingNodes.shardsWithState(INITIALIZING)).routingTable();
@ -17,15 +17,13 @@
* under the License.
*/

package org.elasticsearch.common.logging.log4j;
package org.elasticsearch.common.logging;

import org.apache.log4j.AppenderSkeleton;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.spi.LocationInfo;
import org.apache.log4j.spi.LoggingEvent;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESTestCase;

@ -38,7 +36,7 @@ import java.util.List;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;

public class Log4jESLoggerTests extends ESTestCase {
public class ESLoggerTests extends ESTestCase {

private ESLogger esTestLogger;
private TestAppender testAppender;

@ -49,7 +47,7 @@ public class Log4jESLoggerTests extends ESTestCase {
@Override
public void setUp() throws Exception {
super.setUp();
this.testLevel = Log4jESLoggerFactory.getLogger("test").getLevel();
this.testLevel = ESLoggerFactory.getLogger("test").getLevel();
LogConfigurator.reset();
Path configDir = getDataPath("config");
// Need to set custom path.conf so we can use a custom logging.yml file for the test

@ -59,18 +57,18 @@ public class Log4jESLoggerTests extends ESTestCase {
.build();
LogConfigurator.configure(settings, true);

esTestLogger = Log4jESLoggerFactory.getLogger("test");
Logger testLogger = ((Log4jESLogger) esTestLogger).logger();
esTestLogger = ESLoggerFactory.getLogger("test");
Logger testLogger = esTestLogger.getLogger();
assertThat(testLogger.getLevel(), equalTo(Level.TRACE));
testAppender = new TestAppender();
testLogger.addAppender(testAppender);

// deprecation setup, needs to be set to debug to log
deprecationLogger = Log4jESLoggerFactory.getDeprecationLogger("test");
deprecationLogger = ESLoggerFactory.getDeprecationLogger("test");
deprecationAppender = new TestAppender();
ESLogger logger = Log4jESLoggerFactory.getLogger("deprecation.test");
ESLogger logger = ESLoggerFactory.getLogger("deprecation.test");
logger.setLevel("DEBUG");
(((Log4jESLogger) logger).logger()).addAppender(deprecationAppender);
logger.getLogger().addAppender(deprecationAppender);
}

@Override

@ -78,9 +76,9 @@ public class Log4jESLoggerTests extends ESTestCase {
public void tearDown() throws Exception {
super.tearDown();
esTestLogger.setLevel(testLevel);
Logger testLogger = ((Log4jESLogger) esTestLogger).logger();
Logger testLogger = esTestLogger.getLogger();
testLogger.removeAppender(testAppender);
Logger deprecationLogger = ((Log4jESLogger) Log4jESLoggerFactory.getLogger("deprecation.test")).logger();
Logger deprecationLogger = ESLoggerFactory.getLogger("deprecation.test").getLogger();
deprecationLogger.removeAppender(deprecationAppender);
}

@ -99,7 +97,7 @@ public class Log4jESLoggerTests extends ESTestCase {
assertThat(event.getRenderedMessage(), equalTo("This is an error"));
LocationInfo locationInfo = event.getLocationInformation();
assertThat(locationInfo, notNullValue());
assertThat(locationInfo.getClassName(), equalTo(Log4jESLoggerTests.class.getCanonicalName()));
assertThat(locationInfo.getClassName(), equalTo(ESLoggerTests.class.getCanonicalName()));
assertThat(locationInfo.getMethodName(), equalTo("testLocationInfoTest"));
event = events.get(1);
assertThat(event, notNullValue());

@ -107,7 +105,7 @@ public class Log4jESLoggerTests extends ESTestCase {
assertThat(event.getRenderedMessage(), equalTo("This is a warning"));
locationInfo = event.getLocationInformation();
assertThat(locationInfo, notNullValue());
assertThat(locationInfo.getClassName(), equalTo(Log4jESLoggerTests.class.getCanonicalName()));
assertThat(locationInfo.getClassName(), equalTo(ESLoggerTests.class.getCanonicalName()));
assertThat(locationInfo.getMethodName(), equalTo("testLocationInfoTest"));
event = events.get(2);
assertThat(event, notNullValue());

@ -115,7 +113,7 @@ public class Log4jESLoggerTests extends ESTestCase {
assertThat(event.getRenderedMessage(), equalTo("This is an info"));
locationInfo = event.getLocationInformation();
assertThat(locationInfo, notNullValue());
assertThat(locationInfo.getClassName(), equalTo(Log4jESLoggerTests.class.getCanonicalName()));
assertThat(locationInfo.getClassName(), equalTo(ESLoggerTests.class.getCanonicalName()));
assertThat(locationInfo.getMethodName(), equalTo("testLocationInfoTest"));
event = events.get(3);
assertThat(event, notNullValue());

@ -123,7 +121,7 @@ public class Log4jESLoggerTests extends ESTestCase {
assertThat(event.getRenderedMessage(), equalTo("This is a debug"));
locationInfo = event.getLocationInformation();
assertThat(locationInfo, notNullValue());
assertThat(locationInfo.getClassName(), equalTo(Log4jESLoggerTests.class.getCanonicalName()));
assertThat(locationInfo.getClassName(), equalTo(ESLoggerTests.class.getCanonicalName()));
assertThat(locationInfo.getMethodName(), equalTo("testLocationInfoTest"));
event = events.get(4);
assertThat(event, notNullValue());

@ -131,7 +129,7 @@ public class Log4jESLoggerTests extends ESTestCase {
assertThat(event.getRenderedMessage(), equalTo("This is a trace"));
locationInfo = event.getLocationInformation();
assertThat(locationInfo, notNullValue());
assertThat(locationInfo.getClassName(), equalTo(Log4jESLoggerTests.class.getCanonicalName()));
assertThat(locationInfo.getClassName(), equalTo(ESLoggerTests.class.getCanonicalName()));
assertThat(locationInfo.getMethodName(), equalTo("testLocationInfoTest"));
}
@ -17,12 +17,11 @@
* under the License.
*/

package org.elasticsearch.common.logging.log4j;
package org.elasticsearch.common.logging;

import org.apache.log4j.Appender;
import org.apache.log4j.Logger;
import org.elasticsearch.common.cli.CliToolTestCase;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.node.internal.InternalSettingsPreparer;

@ -50,7 +49,7 @@ public class LoggingConfigurationTests extends ESTestCase {
}

public void testResolveMultipleConfigs() throws Exception {
String level = Log4jESLoggerFactory.getLogger("test").getLevel();
String level = ESLoggerFactory.getLogger("test").getLevel();
try {
Path configDir = getDataPath("config");
Settings settings = Settings.builder()

@ -59,22 +58,22 @@ public class LoggingConfigurationTests extends ESTestCase {
.build();
LogConfigurator.configure(settings, true);

ESLogger esLogger = Log4jESLoggerFactory.getLogger("test");
Logger logger = ((Log4jESLogger) esLogger).logger();
ESLogger esLogger = ESLoggerFactory.getLogger("test");
Logger logger = esLogger.getLogger();
Appender appender = logger.getAppender("console");
assertThat(appender, notNullValue());

esLogger = Log4jESLoggerFactory.getLogger("second");
logger = ((Log4jESLogger) esLogger).logger();
esLogger = ESLoggerFactory.getLogger("second");
logger = esLogger.getLogger();
appender = logger.getAppender("console2");
assertThat(appender, notNullValue());

esLogger = Log4jESLoggerFactory.getLogger("third");
logger = ((Log4jESLogger) esLogger).logger();
esLogger = ESLoggerFactory.getLogger("third");
logger = esLogger.getLogger();
appender = logger.getAppender("console3");
assertThat(appender, notNullValue());
} finally {
Log4jESLoggerFactory.getLogger("test").setLevel(level);
ESLoggerFactory.getLogger("test").setLevel(level);
}
}

@ -166,8 +165,8 @@ public class LoggingConfigurationTests extends ESTestCase {
.build(), new CliToolTestCase.MockTerminal());
LogConfigurator.configure(environment.settings(), true);
// args should overwrite whatever is in the config
ESLogger esLogger = Log4jESLoggerFactory.getLogger("test_resolve_order");
Logger logger = ((Log4jESLogger) esLogger).logger();
ESLogger esLogger = ESLoggerFactory.getLogger("test_resolve_order");
Logger logger = esLogger.getLogger();
Appender appender = logger.getAppender("console");
assertThat(appender, notNullValue());
assertTrue(logger.isTraceEnabled());

@ -190,10 +189,10 @@ public class LoggingConfigurationTests extends ESTestCase {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build(), new CliToolTestCase.MockTerminal());
LogConfigurator.configure(environment.settings(), false);
ESLogger esLogger = Log4jESLoggerFactory.getLogger("test_config_not_read");
ESLogger esLogger = ESLoggerFactory.getLogger("test_config_not_read");

assertNotNull(esLogger);
Logger logger = ((Log4jESLogger) esLogger).logger();
Logger logger = esLogger.getLogger();
Appender appender = logger.getAppender("console");
// config was not read
assertNull(appender);
@ -1,131 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.logging.jdk;

import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.test.ESTestCase;

import java.util.ArrayList;
import java.util.List;
import java.util.logging.Handler;
import java.util.logging.Level;
import java.util.logging.LogRecord;
import java.util.logging.Logger;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.notNullValue;

public class JDKESLoggerTests extends ESTestCase {

    private ESLogger esTestLogger;
    private TestHandler testHandler;

    @Override
    public void setUp() throws Exception {
        super.setUp();

        JdkESLoggerFactory esTestLoggerFactory = new JdkESLoggerFactory();
        esTestLogger = esTestLoggerFactory.newInstance("test");
        Logger testLogger = ((JdkESLogger) esTestLogger).logger();
        testLogger.setLevel(Level.FINEST);
        assertThat(testLogger.getLevel(), equalTo(Level.FINEST));
        testHandler = new TestHandler();
        testLogger.addHandler(testHandler);
    }

    public void testLocationInfoTest() {
        esTestLogger.error("This is an error");
        esTestLogger.warn("This is a warning");
        esTestLogger.info("This is an info");
        esTestLogger.debug("This is a debug");
        esTestLogger.trace("This is a trace");
        List<LogRecord> records = testHandler.getEvents();
        assertThat(records, notNullValue());
        assertThat(records.size(), equalTo(5));
        LogRecord record = records.get(0);
        assertThat(record, notNullValue());
        assertThat(record.getLevel(), equalTo(Level.SEVERE));
        assertThat(record.getMessage(), equalTo("This is an error"));
        assertThat(record.getSourceClassName(), equalTo(JDKESLoggerTests.class.getCanonicalName()));
        assertThat(record.getSourceMethodName(), equalTo("testLocationInfoTest"));
        record = records.get(1);
        assertThat(record, notNullValue());
        assertThat(record.getLevel(), equalTo(Level.WARNING));
        assertThat(record.getMessage(), equalTo("This is a warning"));
        assertThat(record.getSourceClassName(), equalTo(JDKESLoggerTests.class.getCanonicalName()));
        assertThat(record.getSourceMethodName(), equalTo("testLocationInfoTest"));
        record = records.get(2);
        assertThat(record, notNullValue());
        assertThat(record.getLevel(), equalTo(Level.INFO));
        assertThat(record.getMessage(), equalTo("This is an info"));
        assertThat(record.getSourceClassName(), equalTo(JDKESLoggerTests.class.getCanonicalName()));
        assertThat(record.getSourceMethodName(), equalTo("testLocationInfoTest"));
        record = records.get(3);
        assertThat(record, notNullValue());
        assertThat(record.getLevel(), equalTo(Level.FINE));
        assertThat(record.getMessage(), equalTo("This is a debug"));
        assertThat(record.getSourceClassName(), equalTo(JDKESLoggerTests.class.getCanonicalName()));
        assertThat(record.getSourceMethodName(), equalTo("testLocationInfoTest"));
        record = records.get(4);
        assertThat(record, notNullValue());
        assertThat(record.getLevel(), equalTo(Level.FINEST));
        assertThat(record.getMessage(), equalTo("This is a trace"));
        assertThat(record.getSourceClassName(), equalTo(JDKESLoggerTests.class.getCanonicalName()));
        assertThat(record.getSourceMethodName(), equalTo("testLocationInfoTest"));
    }

    public void testSetLogLevelString() {
        // verify the string based level-setters
        esTestLogger.setLevel("error");
        assertThat(esTestLogger.getLevel(), equalTo("SEVERE"));
        esTestLogger.setLevel("warn");
        assertThat(esTestLogger.getLevel(), equalTo("WARNING"));
        esTestLogger.setLevel("info");
        assertThat(esTestLogger.getLevel(), equalTo("INFO"));
        esTestLogger.setLevel("debug");
        assertThat(esTestLogger.getLevel(), equalTo("FINE"));
        esTestLogger.setLevel("trace");
        assertThat(esTestLogger.getLevel(), equalTo("FINEST"));
    }

    private static class TestHandler extends Handler {

        private List<LogRecord> records = new ArrayList<>();

        @Override
        public void close() {
        }

        public List<LogRecord> getEvents() {
            return records;
        }

        @Override
        public void publish(LogRecord record) {
            // Forces it to generate the location information
            record.getSourceClassName();
            records.add(record);
        }

        @Override
        public void flush() {
        }
    }
}
@ -31,9 +31,12 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Function;

public class ScopedSettingsTests extends ESTestCase {

@ -299,4 +302,25 @@ public class ScopedSettingsTests extends ESTestCase {
ESLoggerFactory.getRootLogger().setLevel(level);
}
}

public void testOverlappingComplexMatchSettings() {
Set<Setting<?>> settings = new LinkedHashSet<>(2);
final boolean groupFirst = randomBoolean();
final Setting<?> groupSetting = Setting.groupSetting("foo.", false, Setting.Scope.CLUSTER);
final Setting<?> listSetting = Setting.listSetting("foo.bar", Collections.emptyList(), Function.identity(), false,
Setting.Scope.CLUSTER);
settings.add(groupFirst ? groupSetting : listSetting);
settings.add(groupFirst ? listSetting : groupSetting);

try {
new ClusterSettings(Settings.EMPTY, settings);
fail("an exception should have been thrown because settings overlap");
} catch (IllegalArgumentException e) {
if (groupFirst) {
assertEquals("complex setting key: [foo.bar] overlaps existing setting key: [foo.]", e.getMessage());
} else {
assertEquals("complex setting key: [foo.] overlaps existing setting key: [foo.bar]", e.getMessage());
}
}
}
}
@ -58,7 +58,7 @@ public class JsonSettingsLoaderTests extends ESTestCase {
fail("expected exception");
} catch (SettingsException e) {
assertEquals(e.getCause().getClass(), ElasticsearchParseException.class);
assertTrue(e.toString().contains("duplicate settings key [foo] found at line number [1], column number [13], previous value [bar], current value [baz]"));
assertTrue(e.toString().contains("duplicate settings key [foo] found at line number [1], column number [20], previous value [bar], current value [baz]"));
}
}
}

@ -112,7 +112,7 @@ public class PendingClusterStatesQueueTests extends ESTestCase {
for (ClusterStateContext context : queue.pendingStates) {
final String pendingMaster = context.state.nodes().masterNodeId();
assertThat("found a cluster state from [" + pendingMaster
+ "], after a state from [" + processedMaster + "] was proccessed",
+ "], after a state from [" + processedMaster + "] was processed",
pendingMaster, equalTo(processedMaster));
}
// and check all committed contexts from another master were failed

@ -156,7 +156,7 @@ public class SimpleDateMappingTests extends ESSingleNodeTestCase {
.startObject()
.field("date_field_en", "Wed, 06 Dec 2000 02:55:00 -0800")
.field("date_field_de", "Mi, 06 Dez 2000 02:55:00 -0800")
.field("date_field_default", "Wed, 06 Dec 2000 02:55:00 -0800") // check default - no exception is a successs!
.field("date_field_default", "Wed, 06 Dec 2000 02:55:00 -0800") // check default - no exception is a success!
.endObject()
.bytes());
assertNumericTokensEqual(doc, defaultMapper, "date_field_en", "date_field_de");

@ -525,7 +525,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {

try {
createIndex("test");
fail("index creation should have failed due to alias with existing index name in mathching index template");
fail("index creation should have failed due to alias with existing index name in matching index template");
} catch(InvalidAliasNameException e) {
assertThat(e.getMessage(), equalTo("Invalid alias name [index], an index exists with the same name as the alias"));
}

@ -83,7 +83,7 @@ public class NettyTransportIT extends ESIntegTestCase {
assertThat(clusterIndexHealths.getStatus(), is(ClusterHealthStatus.GREEN));
try {
transportClient.filterWithHeader(Collections.singletonMap("ERROR", "MY MESSAGE")).admin().cluster().prepareHealth().get();
fail("Expected exception, but didnt happen");
fail("Expected exception, but didn't happen");
} catch (ElasticsearchException e) {
assertThat(e.getMessage(), containsString("MY MESSAGE"));
assertThat(channelProfileName, is(TransportSettings.DEFAULT_PROFILE));
@ -39,7 +39,7 @@ buildscript {
}

// this is common configuration for distributions, but we also add it here for the license check to use
ext.dependencyFiles = project(':core').configurations.runtime.copyRecursive().exclude(module: 'slf4j-api')
ext.dependencyFiles = project(':core').configurations.runtime.copyRecursive()

/*****************************************************************************

@ -1 +0,0 @@
123f29333b2c6b3516b14252b6e93226bfcd6e37

@ -0,0 +1 @@
4127b62db028f981e81caa248953c0899d720f98

@ -1 +0,0 @@
1e13c575f914c83761bb8e2aca7dfd9e4c647579

@ -0,0 +1 @@
4282418817ad2be26ce18739461499eae679390f

@ -1 +0,0 @@
395d18c1a1dd730b8026ee59c4067e5d2b45ba6e

@ -0,0 +1 @@
9ccde45d574388371d2c4032d4b853e2d596777e

@ -1 +0,0 @@
4ae23088dd3fae47c66843f2e4251d7255ee140e

@ -0,0 +1 @@
6c5235a523b7d720b2b0e1b850ea14083e342b07
@ -351,7 +351,7 @@ you can disable this behavior with `pad: false`
All the moving average model support a "prediction" mode, which will attempt to extrapolate into the future given the
current smoothed, moving average. Depending on the model and parameter, these predictions may or may not be accurate.

Predictions are enabled by adding a `predict` parameter to any moving average aggregation, specifying the nubmer of
Predictions are enabled by adding a `predict` parameter to any moving average aggregation, specifying the number of
predictions you would like appended to the end of the series. These predictions will be spaced out at the same interval
as your buckets:
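A minimal sketch of what such a prediction request could look like (not part of the original diff; the aggregation name `the_movavg` and the `buckets_path` target `the_sum` are placeholders):

{
  "the_movavg": {
    "moving_avg": {
      "buckets_path": "the_sum",
      "window": 30,
      "model": "simple",
      "predict": 10
    }
  }
}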
@ -928,8 +928,8 @@ TIME (?!<[0-9])%{HOUR}:%{MINUTE}(?::%{SECOND})(?![0-9])
[options="header"]
|======
| Name | Required | Default | Description
| `match_field` | yes | - | The field to use for grok expression parsing
| `match_pattern` | yes | - | The grok expression to match and extract named captures with
| `field` | yes | - | The field to use for grok expression parsing
| `pattern` | yes | - | The grok expression to match and extract named captures with
| `pattern_definitions` | no | - | A map of pattern-name and pattern tuples defining custom patterns to be used by the current processor. Patterns matching existing names will override the pre-existing definition.
|======

@ -959,8 +959,8 @@ An example pipeline for processing the above document using Grok:
"processors": [
{
"grok": {
"match_field": "message",
"match_pattern": "%{IP:client} %{WORD:method} %{URIPATHPARAM:request} %{NUMBER:bytes} %{NUMBER:duration}"
"field": "message",
"pattern": "%{IP:client} %{WORD:method} %{URIPATHPARAM:request} %{NUMBER:bytes} %{NUMBER:duration}"
}
}
]

@ -990,8 +990,8 @@ An example of a pipeline specifying custom pattern definitions:
"processors": [
{
"grok": {
"match_field": "message",
"match_pattern": "my %{FAVORITE_DOG:dog} is colored %{RGB:color}"
"field": "message",
"pattern": "my %{FAVORITE_DOG:dog} is colored %{RGB:color}"
"pattern_definitions" : {
"FAVORITE_DOG" : "beagle",
"RGB" : "RED|GREEN|BLUE"
@ -278,7 +278,7 @@ been removed in favour of just using the `analyzer` setting.
If just the `analyzer` is set, it will be used at index time and at search time. To use a different analyzer at search time, specify both the `analyzer` and a `search_analyzer`.

The `index_analyzer`, `search_analyzer`, and `analyzer` type-level settings
have also been removed, as is is no longer possible to select fields based on
have also been removed, as it is no longer possible to select fields based on
the type name.

The `_analyzer` meta-field, which allowed setting an analyzer per document has
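A minimal sketch of a field mapping that sets both analyzers (not part of the original diff; the type name `my_type`, field name `title`, and the chosen analyzers are placeholders):

{
  "mappings": {
    "my_type": {
      "properties": {
        "title": {
          "type": "string",
          "analyzer": "standard",
          "search_analyzer": "whitespace"
        }
      }
    }
  }
}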
@ -505,6 +505,8 @@ thirdPartyAudit.excludes = [
'org.python.objectweb.asm.tree.analysis.BasicVerifier',
'org.python.objectweb.asm.tree.analysis.Frame',
'org.python.objectweb.asm.tree.analysis.SimpleVerifier',
'org.slf4j.Logger',
'org.slf4j.LoggerFactory',
'org.tukaani.xz.ARMOptions',
'org.tukaani.xz.ARMThumbOptions',
'org.tukaani.xz.DeltaOptions',

@ -35,3 +35,8 @@ dependencyLicenses {
mapping from: /stax-.*/, to: 'stax'
}

thirdPartyAudit.excludes = [
// Optional and not enabled by Elasticsearch
'org.slf4j.Logger',
'org.slf4j.LoggerFactory',
]

@ -357,4 +357,8 @@ thirdPartyAudit.excludes = [

// internal java api: sun.misc.SignalHandler
'org.apache.hadoop.util.SignalLogger$Handler',

// optional dependencies of slf4j-api
'org.slf4j.impl.StaticMDCBinder',
'org.slf4j.impl.StaticMarkerBinder',
]
@ -0,0 +1 @@
8619e95939167fb37245b5670135e4feb0ec7d50

@ -0,0 +1,21 @@
Copyright (c) 2004-2014 QOS.ch
All rights reserved.

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.