Fix wrong logger usages
These incorrect usages were found by the logger usage checker that was re-enabled in the previous commit.
parent 191fadafcc
commit bf5d425ab9
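For context, the pattern being fixed is the Log4j 2 exception-logging idiom: the old calls passed the exception in the varargs position right after the format string, where Log4j 2 treats it as a formatting argument, so the "{}" placeholder is filled with the exception's toString() and the stack trace is dropped. The sketch below is a minimal, self-contained illustration of the before/after shape and is not code from this repository; the class name LoggerUsageExample, the method, and the shardId parameter are made up for the example.

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;

// Hypothetical stand-alone class; only the logging calls mirror the pattern changed in this commit.
public class LoggerUsageExample {

    private static final Logger logger = LogManager.getLogger(LoggerUsageExample.class);

    void logFailure(String shardId, Exception e) {
        // Wrong (the shape removed by this commit): the exception sits in the varargs
        // position, fills the "{}" placeholder, and its stack trace is never printed.
        // logger.warn("[{}] failed to obtain shard lock", e, shardId);

        // Correct (the shape introduced by this commit): build the message lazily via a
        // Supplier and pass the Throwable as the dedicated trailing argument. The cast to
        // Supplier<?> disambiguates between the Supplier and MessageSupplier overloads.
        logger.warn((Supplier<?>) () -> new ParameterizedMessage("[{}] failed to obtain shard lock", shardId), e);
    }
}

Every hunk below applies this same transformation, varying only the message text and arguments of the individual call sites.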
@@ -20,6 +20,8 @@
 package org.elasticsearch.common.util;

 import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
 import org.apache.lucene.util.IOUtils;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.logging.Loggers;

@@ -64,8 +66,8 @@ public class IndexFolderUpgrader {
         } catch (NoSuchFileException | FileNotFoundException exception) {
             // thrown when the source is non-existent because the folder was renamed
             // by another node (shared FS) after we checked if the target exists
-            logger.error("multiple nodes trying to upgrade [{}] in parallel, retry upgrading with single node",
-                exception, target);
+            logger.error((Supplier<?>) () -> new ParameterizedMessage("multiple nodes trying to upgrade [{}] in parallel, retry " +
+                "upgrading with single node", target), exception);
             throw exception;
         } finally {
             if (success) {

@@ -479,7 +479,9 @@ public abstract class Engine implements Closeable {
             try {
                 length = directory.fileLength(file);
             } catch (NoSuchFileException | FileNotFoundException e) {
-                logger.warn("Tried to query fileLength but file is gone [{}] [{}]", e, directory, file);
+                final Directory finalDirectory = directory;
+                logger.warn((Supplier<?>)
+                    () -> new ParameterizedMessage("Tried to query fileLength but file is gone [{}] [{}]", finalDirectory, file), e);
             } catch (IOException e) {
                 final Directory finalDirectory = directory;
                 logger.warn(

@@ -388,7 +388,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
         } catch (FileNotFoundException | NoSuchFileException ex) {
             logger.info("Failed to open / find files while reading metadata snapshot");
         } catch (ShardLockObtainFailedException ex) {
-            logger.info("{}: failed to obtain shard lock", ex, shardId);
+            logger.info((Supplier<?>) () -> new ParameterizedMessage("{}: failed to obtain shard lock", shardId), ex);
         }
         return MetadataSnapshot.EMPTY;
     }

@@ -420,7 +420,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
             SegmentInfos segInfo = Lucene.readSegmentInfos(dir);
             logger.trace("{} loaded segment info [{}]", shardId, segInfo);
         } catch (ShardLockObtainFailedException ex) {
-            logger.error("{} unable to acquire shard lock", ex, shardId);
+            logger.error((Supplier<?>) () -> new ParameterizedMessage("{} unable to acquire shard lock", shardId), ex);
             throw new IOException(ex);
         }
     }

@@ -386,7 +386,7 @@ public class IndexingMemoryController extends AbstractComponent implements Index
                 try {
                     shard.checkIdle(inactiveTimeNS);
                 } catch (EngineClosedException e) {
-                    logger.trace("ignore exception while checking if shard {} is inactive", e, shard.shardId());
+                    logger.trace((Supplier<?>) () -> new ParameterizedMessage("ignore exception while checking if shard {} is inactive", shard.shardId()), e);
                 }
             }
         }

@@ -245,7 +245,7 @@ public class PluginsService extends AbstractComponent {
             try {
                 reference.onModuleMethod.invoke(plugin.v2(), module);
             } catch (IllegalAccessException | InvocationTargetException e) {
-                logger.warn("plugin {}, failed to invoke custom onModule method", e, plugin.v1().getName());
+                logger.warn((Supplier<?>) () -> new ParameterizedMessage("plugin {}, failed to invoke custom onModule method", plugin.v1().getName()), e);
                 throw new ElasticsearchException("failed to invoke onModule", e);
             } catch (Exception e) {
                 logger.warn((Supplier<?>) () -> new ParameterizedMessage("plugin {}, failed to invoke custom onModule method", plugin.v1().getName()), e);

@@ -395,7 +395,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
         } catch (SnapshotMissingException ex) {
             throw ex;
         } catch (IllegalStateException | SnapshotException | ElasticsearchParseException ex) {
-            logger.warn("cannot read snapshot file [{}]", ex, snapshotId);
+            logger.warn((Supplier<?>) () -> new ParameterizedMessage("cannot read snapshot file [{}]", snapshotId), ex);
         }
         MetaData metaData = null;
         try {

@@ -405,7 +405,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
                 metaData = readSnapshotMetaData(snapshotId, null, repositoryData.resolveIndices(indices), true);
             }
         } catch (IOException | SnapshotException ex) {
-            logger.warn("cannot read metadata for snapshot [{}]", ex, snapshotId);
+            logger.warn((Supplier<?>) () -> new ParameterizedMessage("cannot read metadata for snapshot [{}]", snapshotId), ex);
         }
         try {
             // Delete snapshot from the index file, since it is the maintainer of truth of active snapshots

@@ -601,7 +601,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
                 metaDataBuilder.put(indexMetaDataFormat(snapshotVersion).read(indexMetaDataBlobContainer, snapshotId.getUUID()), false);
             } catch (ElasticsearchParseException | IOException ex) {
                 if (ignoreIndexErrors) {
-                    logger.warn("[{}] [{}] failed to read metadata for index", ex, snapshotId, index.getName());
+                    logger.warn((Supplier<?>) () -> new ParameterizedMessage("[{}] [{}] failed to read metadata for index", snapshotId, index.getName()), ex);
                 } else {
                     throw ex;
                 }

@@ -529,7 +529,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase {
                 } catch (InterruptedException e) {
                     // fine - semaphore interrupt
                 } catch (AssertionError | Exception e) {
-                    logger.info("unexpected exception in background thread of [{}]", e, node);
+                    logger.info((Supplier<?>) () -> new ParameterizedMessage("unexpected exception in background thread of [{}]", node), e);
                 }
             }
         });

@@ -315,7 +315,7 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri
             }
             throw ae;
         } catch (Exception | NoClassDefFoundError e) {
-            logger.trace("failed to run {}", e, compiledScript);
+            logger.trace((Supplier<?>) () -> new ParameterizedMessage("failed to run {}", compiledScript), e);
             throw new ScriptException("Error evaluating " + compiledScript.name(), e, emptyList(), "", compiledScript.lang());
         }
     }