diff --git a/nifi-api/src/main/java/org/apache/nifi/documentation/init/NopComponentLog.java b/nifi-api/src/main/java/org/apache/nifi/documentation/init/NopComponentLog.java
index 0ecb3d93de..e2187a52ae 100644
--- a/nifi-api/src/main/java/org/apache/nifi/documentation/init/NopComponentLog.java
+++ b/nifi-api/src/main/java/org/apache/nifi/documentation/init/NopComponentLog.java
@@ -27,7 +27,7 @@ public class NopComponentLog implements ComponentLog {
}
@Override
- public void warn(final String msg, final Object[] os) {
+ public void warn(final String msg, final Object... os) {
}
@@ -52,7 +52,7 @@ public class NopComponentLog implements ComponentLog {
}
@Override
- public void trace(final String msg, final Object[] os) {
+ public void trace(final String msg, final Object... os) {
}
@@ -102,7 +102,7 @@ public class NopComponentLog implements ComponentLog {
}
@Override
- public void info(final String msg, final Object[] os) {
+ public void info(final String msg, final Object... os) {
}
@@ -132,7 +132,7 @@ public class NopComponentLog implements ComponentLog {
}
@Override
- public void error(final String msg, final Object[] os) {
+ public void error(final String msg, final Object... os) {
}
@@ -157,7 +157,7 @@ public class NopComponentLog implements ComponentLog {
}
@Override
- public void debug(final String msg, final Object[] os) {
+ public void debug(final String msg, final Object... os) {
}
@@ -182,7 +182,7 @@ public class NopComponentLog implements ComponentLog {
}
@Override
- public void log(final LogLevel level, final String msg, final Object[] os) {
+ public void log(final LogLevel level, final String msg, final Object... os) {
}
diff --git a/nifi-api/src/main/java/org/apache/nifi/logging/ComponentLog.java b/nifi-api/src/main/java/org/apache/nifi/logging/ComponentLog.java
index 30b6b510e8..dd65aedbb2 100644
--- a/nifi-api/src/main/java/org/apache/nifi/logging/ComponentLog.java
+++ b/nifi-api/src/main/java/org/apache/nifi/logging/ComponentLog.java
@@ -31,7 +31,7 @@ package org.apache.nifi.logging;
* within the same NiFi instance.
*
*
- * If the last value in an Object[] argument that is passed to the logger is a
+ * If the last value in an Object... argument that is passed to the logger is a
* Throwable, then the logged message will include a toString() of
* the Throwable; in addition, if the component's logger is set to DEBUG level
* via the logback configuration, the Stacktrace will also be logged. This
@@ -53,6 +53,7 @@ public interface ComponentLog {
void warn(String msg, Object... os);
+ @Deprecated
void warn(String msg, Object[] os, Throwable t);
void warn(String msg);
@@ -67,6 +68,7 @@ public interface ComponentLog {
void trace(String msg);
+ @Deprecated
void trace(String msg, Object[] os, Throwable t);
default void trace(LogMessage logMessage) {
@@ -89,6 +91,7 @@ public interface ComponentLog {
void info(String msg);
+ @Deprecated
void info(String msg, Object[] os, Throwable t);
default void info(LogMessage logMessage) {
@@ -103,6 +106,7 @@ public interface ComponentLog {
void error(String msg);
+ @Deprecated
void error(String msg, Object[] os, Throwable t);
default void error(LogMessage logMessage) {
@@ -113,6 +117,7 @@ public interface ComponentLog {
void debug(String msg, Object... os);
+ @Deprecated
void debug(String msg, Object[] os, Throwable t);
void debug(String msg);
@@ -184,6 +189,7 @@ public interface ComponentLog {
}
}
+ @Deprecated
default void log(LogLevel level, String msg, Object[] os, Throwable t) {
switch (level) {
case DEBUG:
diff --git a/nifi-commons/nifi-logging-utils/src/main/java/org/apache/nifi/logging/NiFiLog.java b/nifi-commons/nifi-logging-utils/src/main/java/org/apache/nifi/logging/NiFiLog.java
index 3de8518625..8f6f1d954f 100644
--- a/nifi-commons/nifi-logging-utils/src/main/java/org/apache/nifi/logging/NiFiLog.java
+++ b/nifi-commons/nifi-logging-utils/src/main/java/org/apache/nifi/logging/NiFiLog.java
@@ -44,7 +44,7 @@ public class NiFiLog implements Logger {
}
@Override
- public void warn(Marker marker, String string, Object[] os) {
+ public void warn(Marker marker, String string, Object... os) {
logger.warn(marker, string, os);
}
@@ -78,7 +78,7 @@ public class NiFiLog implements Logger {
}
@Override
- public void warn(String string, Object[] os) {
+ public void warn(String string, Object... os) {
logger.warn(string, os);
}
@@ -98,7 +98,7 @@ public class NiFiLog implements Logger {
}
@Override
- public void trace(Marker marker, String string, Object[] os) {
+ public void trace(Marker marker, String string, Object... os) {
logger.trace(marker, string, os);
}
@@ -123,7 +123,7 @@ public class NiFiLog implements Logger {
}
@Override
- public void trace(String string, Object[] os) {
+ public void trace(String string, Object... os) {
logger.trace(string, os);
}
@@ -202,7 +202,7 @@ public class NiFiLog implements Logger {
}
@Override
- public void info(Marker marker, String string, Object[] os) {
+ public void info(Marker marker, String string, Object... os) {
logger.info(marker, string, os);
}
@@ -231,7 +231,7 @@ public class NiFiLog implements Logger {
}
@Override
- public void info(String string, Object[] os) {
+ public void info(String string, Object... os) {
logger.info(string, os);
}
@@ -265,7 +265,7 @@ public class NiFiLog implements Logger {
}
@Override
- public void error(Marker marker, String string, Object[] os) {
+ public void error(Marker marker, String string, Object... os) {
logger.error(marker, string, os);
}
@@ -294,7 +294,7 @@ public class NiFiLog implements Logger {
}
@Override
- public void error(String string, Object[] os) {
+ public void error(String string, Object... os) {
logger.error(string, os);
}
@@ -319,7 +319,7 @@ public class NiFiLog implements Logger {
}
@Override
- public void debug(Marker marker, String string, Object[] os) {
+ public void debug(Marker marker, String string, Object... os) {
logger.debug(marker, string, os);
}
@@ -344,7 +344,7 @@ public class NiFiLog implements Logger {
}
@Override
- public void debug(String string, Object[] os) {
+ public void debug(String string, Object... os) {
logger.debug(string, os);
}
diff --git a/nifi-mock/src/main/java/org/apache/nifi/util/CapturingLogger.java b/nifi-mock/src/main/java/org/apache/nifi/util/CapturingLogger.java
index 10af935fea..3a947aec4d 100644
--- a/nifi-mock/src/main/java/org/apache/nifi/util/CapturingLogger.java
+++ b/nifi-mock/src/main/java/org/apache/nifi/util/CapturingLogger.java
@@ -124,9 +124,9 @@ public class CapturingLogger implements Logger {
}
@Override
- public void trace(Marker marker, String format, Object... argArray) {
- traceMessages.add(new LogMessage(marker, format, null, argArray));
- logger.trace(marker, format, argArray);
+ public void trace(Marker marker, String format, Object... arguments) {
+ traceMessages.add(new LogMessage(marker, format, null, arguments));
+ logger.trace(marker, format, arguments);
}
@Override
@@ -197,9 +197,9 @@ public class CapturingLogger implements Logger {
}
@Override
- public void debug(Marker marker, String format, Object... argArray) {
- debugMessages.add(new LogMessage(marker, format, null, argArray));
- logger.debug(marker, format, argArray);
+ public void debug(Marker marker, String format, Object... arguments) {
+ debugMessages.add(new LogMessage(marker, format, null, arguments));
+ logger.debug(marker, format, arguments);
}
@Override
@@ -268,9 +268,9 @@ public class CapturingLogger implements Logger {
}
@Override
- public void info(Marker marker, String format, Object... argArray) {
- infoMessages.add(new LogMessage(marker, format, null, argArray));
- logger.info(marker, format, argArray);
+ public void info(Marker marker, String format, Object... arguments) {
+ infoMessages.add(new LogMessage(marker, format, null, arguments));
+ logger.info(marker, format, arguments);
}
@Override
@@ -338,9 +338,9 @@ public class CapturingLogger implements Logger {
}
@Override
- public void warn(Marker marker, String format, Object... argArray) {
- warnMessages.add(new LogMessage(marker, format, null, argArray));
- logger.warn(marker, format, argArray);
+ public void warn(Marker marker, String format, Object... arguments) {
+ warnMessages.add(new LogMessage(marker, format, null, arguments));
+ logger.warn(marker, format, arguments);
}
@Override
@@ -420,10 +420,10 @@ public class CapturingLogger implements Logger {
}
@Override
- public void error(Marker marker, String format, Object... argArray) {
- final String message = MessageFormatter.arrayFormat(format, argArray).getMessage();
- errorMessages.add(new LogMessage(marker, message, null, argArray));
- logger.error(marker, format, argArray);
+ public void error(Marker marker, String format, Object... arguments) {
+ final String message = MessageFormatter.arrayFormat(format, arguments).getMessage();
+ errorMessages.add(new LogMessage(marker, message, null, arguments));
+ logger.error(marker, format, arguments);
}
@Override
diff --git a/nifi-mock/src/main/java/org/apache/nifi/util/MockComponentLog.java b/nifi-mock/src/main/java/org/apache/nifi/util/MockComponentLog.java
index ebdfcabf88..7187aa41c1 100644
--- a/nifi-mock/src/main/java/org/apache/nifi/util/MockComponentLog.java
+++ b/nifi-mock/src/main/java/org/apache/nifi/util/MockComponentLog.java
@@ -121,7 +121,7 @@ public class MockComponentLog implements ComponentLog {
}
@Override
- public void warn(String msg, Object[] os) {
+ public void warn(String msg, Object... os) {
if (lastArgIsException(os)) {
warn(msg, translateException(os), (Throwable) os[os.length - 1]);
} else {
@@ -156,7 +156,7 @@ public class MockComponentLog implements ComponentLog {
}
@Override
- public void trace(String msg, Object[] os) {
+ public void trace(String msg, Object... os) {
msg = "{} " + msg;
os = addProcessor(os);
logger.trace(msg, os);
@@ -215,7 +215,7 @@ public class MockComponentLog implements ComponentLog {
}
@Override
- public void info(String msg, Object[] os) {
+ public void info(String msg, Object... os) {
msg = "{} " + msg;
os = addProcessor(os);
@@ -258,7 +258,7 @@ public class MockComponentLog implements ComponentLog {
}
@Override
- public void error(String msg, Object[] os) {
+ public void error(String msg, Object... os) {
if (lastArgIsException(os)) {
error(msg, translateException(os), (Throwable) os[os.length - 1]);
} else {
@@ -293,7 +293,7 @@ public class MockComponentLog implements ComponentLog {
}
@Override
- public void debug(String msg, Object[] os) {
+ public void debug(String msg, Object... os) {
os = addProcessor(os);
msg = "{} " + msg;
diff --git a/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/SplitAvro.java b/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/SplitAvro.java
index a35412f40d..d773dddcd1 100644
--- a/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/SplitAvro.java
+++ b/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/src/main/java/org/apache/nifi/processors/avro/SplitAvro.java
@@ -234,7 +234,7 @@ public class SplitAvro extends AbstractProcessor {
final FlowFile originalFlowFile = copyAttributesToOriginal(session, flowFile, fragmentIdentifier, splits.size());
session.transfer(originalFlowFile, REL_ORIGINAL);
} catch (ProcessException e) {
- getLogger().error("Failed to split {} due to {}", new Object[]{flowFile, e.getMessage()}, e);
+ getLogger().error("Failed to split {} due to {}", flowFile, e.getMessage(), e);
session.transfer(flowFile, REL_FAILURE);
}
}
diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/ListS3.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/ListS3.java
index 5a1649814e..050ac60a3b 100644
--- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/ListS3.java
+++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/s3/ListS3.java
@@ -553,7 +553,7 @@ public class ListS3 extends AbstractS3Processor implements VerifiableProcessor {
writer.finishListing();
} catch (final Exception e) {
- getLogger().error("Failed to list contents of bucket due to {}", new Object[] {e}, e);
+ getLogger().error("Failed to list contents of bucket due to {}", e, e);
writer.finishListingExceptionally(e);
session.rollback();
context.yield();
@@ -664,7 +664,7 @@ public class ListS3 extends AbstractS3Processor implements VerifiableProcessor {
writer.finishListing();
} catch (final Exception e) {
- getLogger().error("Failed to list contents of bucket due to {}", new Object[]{e}, e);
+ getLogger().error("Failed to list contents of bucket due to {}", e, e);
writer.finishListingExceptionally(e);
session.rollback();
context.yield();
@@ -680,7 +680,7 @@ public class ListS3 extends AbstractS3Processor implements VerifiableProcessor {
taggingResult = client.getObjectTagging(new GetObjectTaggingRequest(versionSummary.getBucketName(), versionSummary.getKey()));
} catch (final Exception e) {
getLogger().warn("Failed to obtain Object Tags for S3 Object {} in bucket {}. Will list S3 Object without the object tags",
- new Object[] {versionSummary.getKey(), versionSummary.getBucketName()}, e);
+ versionSummary.getKey(), versionSummary.getBucketName(), e);
}
}
return taggingResult;
@@ -693,7 +693,7 @@ public class ListS3 extends AbstractS3Processor implements VerifiableProcessor {
objectMetadata = client.getObjectMetadata(new GetObjectMetadataRequest(versionSummary.getBucketName(), versionSummary.getKey()));
} catch (final Exception e) {
getLogger().warn("Failed to obtain User Metadata for S3 Object {} in bucket {}. Will list S3 Object without the user metadata",
- new Object[] {versionSummary.getKey(), versionSummary.getBucketName()}, e);
+ versionSummary.getKey(), versionSummary.getBucketName(), e);
}
}
return objectMetadata;
@@ -1003,7 +1003,7 @@ public class ListS3 extends AbstractS3Processor implements VerifiableProcessor {
try {
recordWriter.close();
} catch (IOException e) {
- logger.error("Failed to write listing as Records due to {}", new Object[] {e}, e);
+ logger.error("Failed to write listing as Records due to {}", e, e);
}
session.remove(flowFile);
diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/wag/InvokeAWSGatewayApi.java b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/wag/InvokeAWSGatewayApi.java
index d1c2af4acb..73ee986df9 100644
--- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/wag/InvokeAWSGatewayApi.java
+++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-processors/src/main/java/org/apache/nifi/processors/aws/wag/InvokeAWSGatewayApi.java
@@ -330,8 +330,7 @@ public class InvokeAWSGatewayApi extends AbstractAWSGatewayApiProcessor {
} catch (final Exception e) {
// penalize or yield
if (requestFlowFile != null) {
- logger.error("Routing to {} due to exception: {}",
- new Object[]{REL_FAILURE.getName(), e}, e);
+ logger.error("Routing to {} due to exception: {}", REL_FAILURE.getName(), e, e);
requestFlowFile = session.penalize(requestFlowFile);
requestFlowFile = session
.putAttribute(requestFlowFile, EXCEPTION_CLASS, e.getClass().getName());
@@ -351,8 +350,7 @@ public class InvokeAWSGatewayApi extends AbstractAWSGatewayApiProcessor {
session.remove(responseFlowFile);
}
} catch (final Exception e1) {
- logger.error("Could not cleanup response flowfile due to exception: {}",
- new Object[]{e1}, e1);
+ logger.error("Could not cleanup response flowfile due to exception: {}", e1, e1);
}
}
}
diff --git a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/cosmos/document/AbstractAzureCosmosDBProcessor.java b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/cosmos/document/AbstractAzureCosmosDBProcessor.java
index b92098a3c4..fcc9d0c7a4 100644
--- a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/cosmos/document/AbstractAzureCosmosDBProcessor.java
+++ b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/cosmos/document/AbstractAzureCosmosDBProcessor.java
@@ -202,7 +202,7 @@ public abstract class AbstractAzureCosmosDBProcessor extends AbstractProcessor {
this.container = null;
this.cosmosClient.close();
}catch(CosmosException e) {
- logger.error("Error closing Cosmos DB client due to {}", new Object[] { e.getMessage() }, e);
+ logger.error("Error closing Cosmos DB client due to {}", e.getMessage(), e);
} finally {
this.cosmosClient = null;
}
diff --git a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/cosmos/document/PutAzureCosmosDBRecord.java b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/cosmos/document/PutAzureCosmosDBRecord.java
index 70b07c75b3..d258f8786f 100644
--- a/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/cosmos/document/PutAzureCosmosDBRecord.java
+++ b/nifi-nar-bundles/nifi-azure-bundle/nifi-azure-processors/src/main/java/org/apache/nifi/processors/azure/cosmos/document/PutAzureCosmosDBRecord.java
@@ -195,7 +195,7 @@ public class PutAzureCosmosDBRecord extends AbstractAzureCosmosDBProcessor {
bulkInsert(batch);
}
} catch (SchemaNotFoundException | MalformedRecordException | IOException | CosmosException e) {
- logger.error("PutAzureCosmoDBRecord failed with error: {}", new Object[]{e.getMessage()}, e);
+ logger.error("PutAzureCosmoDBRecord failed with error: {}", e.getMessage(), e);
error = true;
} finally {
if (!error) {
diff --git a/nifi-nar-bundles/nifi-cassandra-bundle/nifi-cassandra-processors/src/main/java/org/apache/nifi/processors/cassandra/PutCassandraQL.java b/nifi-nar-bundles/nifi-cassandra-bundle/nifi-cassandra-processors/src/main/java/org/apache/nifi/processors/cassandra/PutCassandraQL.java
index bcbaffbe06..5f344c14ca 100644
--- a/nifi-nar-bundles/nifi-cassandra-bundle/nifi-cassandra-processors/src/main/java/org/apache/nifi/processors/cassandra/PutCassandraQL.java
+++ b/nifi-nar-bundles/nifi-cassandra-bundle/nifi-cassandra-processors/src/main/java/org/apache/nifi/processors/cassandra/PutCassandraQL.java
@@ -257,8 +257,7 @@ public class PutCassandraQL extends AbstractCassandraProcessor {
} catch (final QueryValidationException qve) {
logger.error("The CQL statement {} is invalid due to syntax error, authorization issue, or another "
- + "validation problem; routing {} to failure",
- new Object[]{cql, flowFile}, qve);
+ + "validation problem; routing {} to failure", cql, flowFile, qve);
flowFile = session.penalize(flowFile);
session.transfer(flowFile, REL_FAILURE);
diff --git a/nifi-nar-bundles/nifi-enrich-bundle/nifi-enrich-processors/src/main/java/org/apache/nifi/processors/GeoEnrichIP.java b/nifi-nar-bundles/nifi-enrich-bundle/nifi-enrich-processors/src/main/java/org/apache/nifi/processors/GeoEnrichIP.java
index b8c797cd27..96ae4ca8ef 100644
--- a/nifi-nar-bundles/nifi-enrich-bundle/nifi-enrich-processors/src/main/java/org/apache/nifi/processors/GeoEnrichIP.java
+++ b/nifi-nar-bundles/nifi-enrich-bundle/nifi-enrich-processors/src/main/java/org/apache/nifi/processors/GeoEnrichIP.java
@@ -113,7 +113,7 @@ public class GeoEnrichIP extends AbstractEnrichIP {
getLogger().warn("Could not resolve the IP for value '{}', contained within the attribute '{}' in " +
"FlowFile '{}'. This is usually caused by issue resolving the appropriate DNS record or " +
"providing the processor with an invalid IP address ",
- new Object[]{ipAttributeValue, IP_ADDRESS_ATTRIBUTE.getDisplayName(), flowFile}, ioe);
+ ipAttributeValue, IP_ADDRESS_ATTRIBUTE.getDisplayName(), flowFile, ioe);
return;
}
@@ -136,7 +136,7 @@ public class GeoEnrichIP extends AbstractEnrichIP {
// Most name or IP resolutions failure should have been triggered in the try loop above but
// environmental conditions may trigger errors during the second resolution as well.
session.transfer(flowFile, REL_NOT_FOUND);
- getLogger().warn("Failure while trying to find enrichment data for {} due to {}", new Object[]{flowFile, ex}, ex);
+ getLogger().warn("Failure while trying to find enrichment data for {} due to {}", flowFile, ex, ex);
return;
} finally {
stopWatch.stop();
diff --git a/nifi-nar-bundles/nifi-enrich-bundle/nifi-enrich-processors/src/main/java/org/apache/nifi/processors/ISPEnrichIP.java b/nifi-nar-bundles/nifi-enrich-bundle/nifi-enrich-processors/src/main/java/org/apache/nifi/processors/ISPEnrichIP.java
index 500e4c81b3..9d206c6ebc 100644
--- a/nifi-nar-bundles/nifi-enrich-bundle/nifi-enrich-processors/src/main/java/org/apache/nifi/processors/ISPEnrichIP.java
+++ b/nifi-nar-bundles/nifi-enrich-bundle/nifi-enrich-processors/src/main/java/org/apache/nifi/processors/ISPEnrichIP.java
@@ -85,7 +85,7 @@ public class ISPEnrichIP extends AbstractEnrichIP {
getLogger().warn("Could not resolve the IP for value '{}', contained within the attribute '{}' in " +
"FlowFile '{}'. This is usually caused by issue resolving the appropriate DNS record or " +
"providing the processor with an invalid IP address ",
- new Object[]{ipAttributeValue, IP_ADDRESS_ATTRIBUTE.getDisplayName(), flowFile}, ioe);
+ ipAttributeValue, IP_ADDRESS_ATTRIBUTE.getDisplayName(), flowFile, ioe);
return;
}
final StopWatch stopWatch = new StopWatch(true);
@@ -97,7 +97,7 @@ public class ISPEnrichIP extends AbstractEnrichIP {
// Most name or IP resolutions failure should have been triggered in the try loop above but
// environmental conditions may trigger errors during the second resolution as well.
session.transfer(flowFile, REL_NOT_FOUND);
- getLogger().warn("Failure while trying to find enrichment data for {} due to {}", new Object[]{flowFile, ex}, ex);
+ getLogger().warn("Failure while trying to find enrichment data for {} due to {}", flowFile, ex, ex);
return;
}
diff --git a/nifi-nar-bundles/nifi-enrich-bundle/nifi-enrich-processors/src/main/java/org/apache/nifi/processors/enrich/QueryWhois.java b/nifi-nar-bundles/nifi-enrich-bundle/nifi-enrich-processors/src/main/java/org/apache/nifi/processors/enrich/QueryWhois.java
index df9e362f7d..7aa7ccfdf7 100644
--- a/nifi-nar-bundles/nifi-enrich-bundle/nifi-enrich-processors/src/main/java/org/apache/nifi/processors/enrich/QueryWhois.java
+++ b/nifi-nar-bundles/nifi-enrich-bundle/nifi-enrich-processors/src/main/java/org/apache/nifi/processors/enrich/QueryWhois.java
@@ -327,7 +327,7 @@ public class QueryWhois extends AbstractEnrichProcessor {
if (whoisClient.isConnected()) whoisClient.disconnect();
}
} catch ( IOException e) {
- getLogger().error("Query failed due to {}", new Object[]{e.getMessage()}, e);
+ getLogger().error("Query failed due to {}", e.getMessage(), e);
throw new ProcessException("Error performing Whois Lookup", e);
}
return result;
diff --git a/nifi-nar-bundles/nifi-evtx-bundle/nifi-evtx-processors/src/main/java/org/apache/nifi/processors/evtx/ResultProcessor.java b/nifi-nar-bundles/nifi-evtx-bundle/nifi-evtx-processors/src/main/java/org/apache/nifi/processors/evtx/ResultProcessor.java
index bcb667c701..8177bc3a4f 100644
--- a/nifi-nar-bundles/nifi-evtx-bundle/nifi-evtx-processors/src/main/java/org/apache/nifi/processors/evtx/ResultProcessor.java
+++ b/nifi-nar-bundles/nifi-evtx-bundle/nifi-evtx-processors/src/main/java/org/apache/nifi/processors/evtx/ResultProcessor.java
@@ -40,7 +40,7 @@ public class ResultProcessor {
if (exception == null) {
session.transfer(updated, successRelationship);
} else {
- logger.error(UNABLE_TO_PROCESS_DUE_TO, new Object[]{name, exception}, exception);
+ logger.error(UNABLE_TO_PROCESS_DUE_TO, name, exception, exception);
session.transfer(updated, failureRelationship);
}
}
diff --git a/nifi-nar-bundles/nifi-evtx-bundle/nifi-evtx-processors/src/test/java/org/apache/nifi/processors/evtx/ResultProcessorTest.java b/nifi-nar-bundles/nifi-evtx-bundle/nifi-evtx-processors/src/test/java/org/apache/nifi/processors/evtx/ResultProcessorTest.java
index b8f282e537..66c7a329ef 100644
--- a/nifi-nar-bundles/nifi-evtx-bundle/nifi-evtx-processors/src/test/java/org/apache/nifi/processors/evtx/ResultProcessorTest.java
+++ b/nifi-nar-bundles/nifi-evtx-bundle/nifi-evtx-processors/src/test/java/org/apache/nifi/processors/evtx/ResultProcessorTest.java
@@ -81,6 +81,6 @@ public class ResultProcessorTest {
verify(processSession).putAttribute(flowFile, CoreAttributes.FILENAME.key(), name);
verify(processSession).putAttribute(flowFile, CoreAttributes.MIME_TYPE.key(), MediaType.APPLICATION_XML_UTF_8.toString());
verify(processSession).transfer(flowFile, failureRelationship);
- verify(componentLog).error(eq(ResultProcessor.UNABLE_TO_PROCESS_DUE_TO), any(Object[].class), eq(exception));
+ verify(componentLog).error(eq(ResultProcessor.UNABLE_TO_PROCESS_DUE_TO), any(Object[].class));
}
}
diff --git a/nifi-nar-bundles/nifi-extension-utils/nifi-bin-manager/src/main/java/org/apache/nifi/processor/util/bin/BinFiles.java b/nifi-nar-bundles/nifi-extension-utils/nifi-bin-manager/src/main/java/org/apache/nifi/processor/util/bin/BinFiles.java
index ba363bb58e..72e30a117b 100644
--- a/nifi-nar-bundles/nifi-extension-utils/nifi-bin-manager/src/main/java/org/apache/nifi/processor/util/bin/BinFiles.java
+++ b/nifi-nar-bundles/nifi-extension-utils/nifi-bin-manager/src/main/java/org/apache/nifi/processor/util/bin/BinFiles.java
@@ -288,7 +288,7 @@ public abstract class BinFiles extends AbstractSessionFactoryProcessor {
final String groupingIdentifier = getGroupId(context, flowFile, session);
flowFileGroups.computeIfAbsent(groupingIdentifier, id -> new ArrayList<>()).add(flowFile);
} catch (final Exception e) {
- getLogger().error("Could not determine which Bin to add {} to; will route to failure", new Object[] {flowFile}, e);
+ getLogger().error("Could not determine which Bin to add {} to; will route to failure", flowFile, e);
session.transfer(flowFile, REL_FAILURE);
session.commitAsync();
}
diff --git a/nifi-nar-bundles/nifi-extension-utils/nifi-event-listen/src/main/java/org/apache/nifi/processor/util/listen/AbstractListenEventBatchingProcessor.java b/nifi-nar-bundles/nifi-extension-utils/nifi-event-listen/src/main/java/org/apache/nifi/processor/util/listen/AbstractListenEventBatchingProcessor.java
index d00858725b..fa23111e29 100644
--- a/nifi-nar-bundles/nifi-extension-utils/nifi-event-listen/src/main/java/org/apache/nifi/processor/util/listen/AbstractListenEventBatchingProcessor.java
+++ b/nifi-nar-bundles/nifi-extension-utils/nifi-event-listen/src/main/java/org/apache/nifi/processor/util/listen/AbstractListenEventBatchingProcessor.java
@@ -200,8 +200,7 @@ public abstract class AbstractListenEventBatchingProcessor<E extends Event> extends AbstractListenEventProcessor<E> {
batch.setFlowFile(appendedFlowFile);
} catch (final Exception e) {
- getLogger().error("Failed to write contents of the message to FlowFile due to {}; will re-queue message and try again",
- new Object[] {e.getMessage()}, e);
+ getLogger().error("Failed to write contents of the message to FlowFile due to {}; will re-queue message and try again", e.getMessage(), e);
errorEvents.offer(event);
break;
}
diff --git a/nifi-nar-bundles/nifi-extension-utils/nifi-listed-entity/src/main/java/org/apache/nifi/processor/util/list/AbstractListProcessor.java b/nifi-nar-bundles/nifi-extension-utils/nifi-listed-entity/src/main/java/org/apache/nifi/processor/util/list/AbstractListProcessor.java
index b1dc2fb290..c09593f0c6 100644
--- a/nifi-nar-bundles/nifi-extension-utils/nifi-listed-entity/src/main/java/org/apache/nifi/processor/util/list/AbstractListProcessor.java
+++ b/nifi-nar-bundles/nifi-extension-utils/nifi-listed-entity/src/main/java/org/apache/nifi/processor/util/list/AbstractListProcessor.java
@@ -555,7 +555,7 @@ public abstract class AbstractListProcessor<T extends ListableEntity> extends AbstractProcessor implements VerifiableProcessor {
context.getStateManager().clear(getStateScope(context));
} catch (final IOException re) {
- getLogger().error("Failed to remove previous state from the State Manager.", new Object[]{re.getMessage()}, re);
+ getLogger().error("Failed to remove previous state from the State Manager.", re.getMessage(), re);
context.yield();
return;
}
@@ -565,7 +565,7 @@ public abstract class AbstractListProcessor<T extends ListableEntity> extends AbstractProcessor implements VerifiableProcessor {
// comparision in lastModifiedMap to the same entity.
entityList = performListing(context, IGNORE_MIN_TIMESTAMP_VALUE, ListingMode.EXECUTION);
} catch (final IOException pe) {
- getLogger().error("Failed to perform listing on remote host due to {}", new Object[]{pe.getMessage()}, pe);
+ getLogger().error("Failed to perform listing on remote host due to {}", pe.getMessage(), pe);
context.yield();
return;
}
@@ -670,7 +670,7 @@ public abstract class AbstractListProcessor<T extends ListableEntity> extends AbstractProcessor implements VerifiableProcessor {
);
}
} catch (final IOException e) {
- getLogger().error("Failed to perform listing on remote host due to {}", new Object[]{e.getMessage()}, e);
+ getLogger().error("Failed to perform listing on remote host due to {}", e.getMessage(), e);
context.yield();
return;
}
@@ -757,7 +757,7 @@ public abstract class AbstractListProcessor<T extends ListableEntity> extends AbstractProcessor implements VerifiableProcessor {
// track of when this last executed for consideration of the lag nanos
entityList = performListing(context, minTimestampToListMillis, ListingMode.EXECUTION);
} catch (final IOException e) {
- getLogger().error("Failed to perform listing on remote host due to {}", new Object[]{e.getMessage()}, e);
+ getLogger().error("Failed to perform listing on remote host due to {}", e.getMessage(), e);
context.yield();
return;
}
@@ -1125,7 +1125,7 @@ public abstract class AbstractListProcessor<T extends ListableEntity> extends AbstractProcessor implements VerifiableProcessor {
try {
return performListing(context, minTimestampToList, ListingMode.EXECUTION);
} catch (final IOException e) {
- getLogger().error("Failed to perform listing on remote host due to {}", new Object[]{e.getMessage()}, e);
+ getLogger().error("Failed to perform listing on remote host due to {}", e.getMessage(), e);
return Collections.emptyList();
}
}, entity -> createAttributes(entity, context));
diff --git a/nifi-nar-bundles/nifi-extension-utils/nifi-put-pattern/src/main/java/org/apache/nifi/processor/util/pattern/RollbackOnFailure.java b/nifi-nar-bundles/nifi-extension-utils/nifi-put-pattern/src/main/java/org/apache/nifi/processor/util/pattern/RollbackOnFailure.java
index 2d4d7681db..ae543e576a 100644
--- a/nifi-nar-bundles/nifi-extension-utils/nifi-put-pattern/src/main/java/org/apache/nifi/processor/util/pattern/RollbackOnFailure.java
+++ b/nifi-nar-bundles/nifi-extension-utils/nifi-put-pattern/src/main/java/org/apache/nifi/processor/util/pattern/RollbackOnFailure.java
@@ -190,7 +190,7 @@ public class RollbackOnFailure {
// However, keeping failed FlowFile in the incoming relationship would retry it too often.
// So, administratively yield the process.
if (functionContext.isRollbackOnFailure()) {
- logger.warn("Administratively yielding {} after rolling back due to {}", new Object[]{context.getName(), t}, t);
+ logger.warn("Administratively yielding {} after rolling back due to {}", context.getName(), t, t);
context.yield();
}
});
diff --git a/nifi-nar-bundles/nifi-extension-utils/nifi-reporting-utils/src/main/java/org/apache/nifi/reporting/util/provenance/ProvenanceEventConsumer.java b/nifi-nar-bundles/nifi-extension-utils/nifi-reporting-utils/src/main/java/org/apache/nifi/reporting/util/provenance/ProvenanceEventConsumer.java
index 8adeec4b31..cdacd16d81 100644
--- a/nifi-nar-bundles/nifi-extension-utils/nifi-reporting-utils/src/main/java/org/apache/nifi/reporting/util/provenance/ProvenanceEventConsumer.java
+++ b/nifi-nar-bundles/nifi-extension-utils/nifi-reporting-utils/src/main/java/org/apache/nifi/reporting/util/provenance/ProvenanceEventConsumer.java
@@ -247,7 +247,7 @@ public class ProvenanceEventConsumer {
stateManager.setState(newMapOfState, Scope.LOCAL);
} catch (final IOException ioe) {
logger.error("Failed to update state to {} due to {}; this could result in events being re-sent after a restart. The message of {} was: {}",
- new Object[]{lastEventId, ioe, ioe, ioe.getMessage()}, ioe);
+ lastEventId, ioe, ioe, ioe.getMessage(), ioe);
}
return lastEvent.getEventId() + 1;
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-components/src/main/java/org/apache/nifi/groups/StandardProcessGroup.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-components/src/main/java/org/apache/nifi/groups/StandardProcessGroup.java
index b519cf08cc..d5e1f7b198 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-components/src/main/java/org/apache/nifi/groups/StandardProcessGroup.java
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-components/src/main/java/org/apache/nifi/groups/StandardProcessGroup.java
@@ -1767,7 +1767,7 @@ public final class StandardProcessGroup implements ProcessGroup {
return scheduler.runProcessorOnce(processor, stopCallback);
} catch (Exception e) {
- processor.getLogger().error("Error while running processor {} once.", new Object[]{processor}, e);
+ processor.getLogger().error("Error while running processor {} once.", processor, e);
return stopProcessor(processor);
} finally {
readLock.unlock();
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-components/src/main/java/org/apache/nifi/processor/SimpleProcessLogger.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-components/src/main/java/org/apache/nifi/processor/SimpleProcessLogger.java
index 9f3d7b547f..28af6a99a8 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-components/src/main/java/org/apache/nifi/processor/SimpleProcessLogger.java
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-components/src/main/java/org/apache/nifi/processor/SimpleProcessLogger.java
@@ -70,7 +70,7 @@ public class SimpleProcessLogger implements ComponentLog {
}
@Override
- public void warn(final String msg, final Object[] os) {
+ public void warn(final String msg, final Object... os) {
if (isWarnEnabled()) {
final String componentMessage = getComponentMessage(msg);
final Object[] arguments = insertComponent(os);
@@ -132,7 +132,7 @@ public class SimpleProcessLogger implements ComponentLog {
}
@Override
- public void trace(final String msg, final Object[] os) {
+ public void trace(final String msg, final Object... os) {
if (isTraceEnabled()) {
final String componentMessage = getComponentMessage(msg);
final Object[] arguments = insertComponent(os);
@@ -219,7 +219,7 @@ public class SimpleProcessLogger implements ComponentLog {
}
@Override
- public void info(final String msg, final Object[] os) {
+ public void info(final String msg, final Object... os) {
if (isInfoEnabled()) {
final String componentMessage = getComponentMessage(msg);
final Object[] arguments = insertComponent(os);
@@ -291,7 +291,7 @@ public class SimpleProcessLogger implements ComponentLog {
}
@Override
- public void error(final String msg, final Object[] os) {
+ public void error(final String msg, final Object... os) {
if (isErrorEnabled()) {
final String componentMessage = getComponentMessage(msg);
final Object[] arguments = insertComponent(os);
@@ -348,7 +348,7 @@ public class SimpleProcessLogger implements ComponentLog {
}
@Override
- public void debug(final String msg, final Object[] os) {
+ public void debug(final String msg, final Object... os) {
if (isDebugEnabled()) {
final String componentMessage = getComponentMessage(msg);
final Object[] arguments = insertComponent(os);
@@ -416,7 +416,7 @@ public class SimpleProcessLogger implements ComponentLog {
}
@Override
- public void log(final LogLevel level, final String msg, final Object[] os) {
+ public void log(final LogLevel level, final String msg, final Object... os) {
switch (level) {
case DEBUG:
debug(msg, os);
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/TerminationAwareLogger.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/TerminationAwareLogger.java
index 76677d3f09..d31c46e884 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/TerminationAwareLogger.java
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core-api/src/main/java/org/apache/nifi/controller/TerminationAwareLogger.java
@@ -54,7 +54,7 @@ public class TerminationAwareLogger implements ComponentLog {
}
@Override
- public void warn(String msg, Object[] os) {
+ public void warn(String msg, Object... os) {
if (isTerminated()) {
logger.debug(getMessage(msg, LogLevel.WARN), os);
return;
@@ -94,7 +94,7 @@ public class TerminationAwareLogger implements ComponentLog {
}
@Override
- public void trace(String msg, Object[] os) {
+ public void trace(String msg, Object... os) {
if (isTerminated()) {
logger.trace(getMessage(msg, LogLevel.TRACE), os);
return;
@@ -159,7 +159,7 @@ public class TerminationAwareLogger implements ComponentLog {
}
@Override
- public void info(String msg, Object[] os) {
+ public void info(String msg, Object... os) {
if (isTerminated()) {
logger.debug(getMessage(msg, LogLevel.INFO), os);
return;
@@ -204,7 +204,7 @@ public class TerminationAwareLogger implements ComponentLog {
}
@Override
- public void error(String msg, Object[] os) {
+ public void error(String msg, Object... os) {
if (isTerminated()) {
logger.debug(getMessage(msg, LogLevel.ERROR), os);
return;
@@ -244,7 +244,7 @@ public class TerminationAwareLogger implements ComponentLog {
}
@Override
- public void debug(String msg, Object[] os) {
+ public void debug(String msg, Object... os) {
if (isTerminated()) {
logger.debug(getMessage(msg, LogLevel.DEBUG), os);
return;
@@ -284,7 +284,7 @@ public class TerminationAwareLogger implements ComponentLog {
}
@Override
- public void log(LogLevel level, String msg, Object[] os) {
+ public void log(LogLevel level, String msg, Object... os) {
if (isTerminated()) {
logger.debug(getMessage(msg, level), os);
return;
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/tasks/ConnectableTask.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/tasks/ConnectableTask.java
index 00356896b5..6d493e8cd2 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/tasks/ConnectableTask.java
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-core/src/main/java/org/apache/nifi/controller/tasks/ConnectableTask.java
@@ -291,7 +291,7 @@ public class ConnectableTask {
try {
rawSession.commitAsync(null, t -> {
- procLog.error("Failed to commit session {} due to {}; rolling back", new Object[]{rawSession, t.toString()}, t);
+ procLog.error("Failed to commit session {} due to {}; rolling back", rawSession, t.toString(), t);
});
} catch (final TerminatedTaskException tte) {
procLog.debug("Cannot commit Batch Process Session because the Task was forcefully terminated", tte);
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-nar-utils/src/main/java/org/apache/nifi/mock/MockComponentLogger.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-nar-utils/src/main/java/org/apache/nifi/mock/MockComponentLogger.java
index 31ce0ef428..949deadc2f 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-nar-utils/src/main/java/org/apache/nifi/mock/MockComponentLogger.java
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-framework-nar-utils/src/main/java/org/apache/nifi/mock/MockComponentLogger.java
@@ -35,7 +35,7 @@ public class MockComponentLogger implements ComponentLog {
}
@Override
- public void warn(String msg, Object[] os) {
+ public void warn(String msg, Object... os) {
logger.warn(msg, os);
}
@@ -56,7 +56,7 @@ public class MockComponentLogger implements ComponentLog {
}
@Override
- public void trace(String msg, Object[] os) {
+ public void trace(String msg, Object... os) {
logger.trace(msg, os);
}
@@ -102,7 +102,7 @@ public class MockComponentLogger implements ComponentLog {
}
@Override
- public void info(String msg, Object[] os) {
+ public void info(String msg, Object... os) {
logger.info(msg, os);
}
@@ -129,7 +129,7 @@ public class MockComponentLogger implements ComponentLog {
}
@Override
- public void error(String msg, Object[] os) {
+ public void error(String msg, Object... os) {
logger.error(msg, os);
}
@@ -150,7 +150,7 @@ public class MockComponentLogger implements ComponentLog {
}
@Override
- public void debug(String msg, Object[] os) {
+ public void debug(String msg, Object... os) {
logger.debug(msg, os);
}
diff --git a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/lite/PublishGCPubSubLite.java b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/lite/PublishGCPubSubLite.java
index 5b74f2015d..24c3430d0b 100644
--- a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/lite/PublishGCPubSubLite.java
+++ b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/pubsub/lite/PublishGCPubSubLite.java
@@ -233,7 +233,7 @@ public class PublishGCPubSubLite extends AbstractGCPubSubProcessor implements Ve
successfulFlowFiles.addAll(flowFiles);
} catch (InterruptedException | ExecutionException e) {
getLogger().error("Failed to publish the messages to Google Cloud PubSub Lite topic '{}' due to {}, "
- + "routing all messages from the batch to failure", new Object[]{topicName, e.getLocalizedMessage()}, e);
+ + "routing all messages from the batch to failure", topicName, e.getLocalizedMessage(), e);
session.transfer(flowFiles, REL_FAILURE);
context.yield();
}
diff --git a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/FetchGCSObject.java b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/FetchGCSObject.java
index 7bf16277ba..50ca5bb6d7 100644
--- a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/FetchGCSObject.java
+++ b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/FetchGCSObject.java
@@ -263,7 +263,7 @@ public class FetchGCSObject extends AbstractGCSProcessor {
final Map attributes = StorageAttributes.createAttributes(blob.blob);
flowFile = session.putAllAttributes(flowFile, attributes);
} catch (final StorageException | IOException e) {
- getLogger().error("Failed to fetch GCS Object due to {}", new Object[] {e}, e);
+ getLogger().error("Failed to fetch GCS Object due to {}", e, e);
flowFile = session.penalize(flowFile);
session.transfer(flowFile, REL_FAILURE);
return;
diff --git a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/PutGCSObject.java b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/PutGCSObject.java
index 70cc3332fc..706be68485 100644
--- a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/PutGCSObject.java
+++ b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-processors/src/main/java/org/apache/nifi/processors/gcp/storage/PutGCSObject.java
@@ -526,7 +526,7 @@ public class PutGCSObject extends AbstractGCSProcessor {
}
} catch (StorageException e) {
getLogger().error("Failure completing upload flowfile={} bucket={} key={} reason={}",
- new Object[]{ffFilename, bucket, key, e.getMessage()}, e);
+ ffFilename, bucket, key, e.getMessage(), e);
throw (e);
}
@@ -547,7 +547,7 @@ public class PutGCSObject extends AbstractGCSProcessor {
new Object[]{ff, millis});
} catch (final ProcessException | StorageException e) {
- getLogger().error("Failed to put {} to Google Cloud Storage due to {}", new Object[]{flowFile, e.getMessage()}, e);
+ getLogger().error("Failed to put {} to Google Cloud Storage due to {}", flowFile, e.getMessage(), e);
flowFile = session.penalize(flowFile);
session.transfer(flowFile, REL_FAILURE);
}
diff --git a/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-processors/src/main/java/org/apache/nifi/processors/graph/ExecuteGraphQuery.java b/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-processors/src/main/java/org/apache/nifi/processors/graph/ExecuteGraphQuery.java
index d1f2fe2060..386a18be04 100644
--- a/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-processors/src/main/java/org/apache/nifi/processors/graph/ExecuteGraphQuery.java
+++ b/nifi-nar-bundles/nifi-graph-bundle/nifi-graph-processors/src/main/java/org/apache/nifi/processors/graph/ExecuteGraphQuery.java
@@ -142,8 +142,7 @@ public class ExecuteGraphQuery extends AbstractGraphExecutor {
}
} catch (Exception exception) {
- getLogger().error("Failed to execute graph statement due to {}",
- new Object[]{exception.getLocalizedMessage()}, exception);
+ getLogger().error("Failed to execute graph statement due to {}", exception.getLocalizedMessage(), exception);
session.remove(output);
if (flowFile != null) {
flowFile = session.putAttribute(flowFile, ERROR_MESSAGE, String.valueOf(exception.getMessage()));
diff --git a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java
index 011bf3185a..55b69f71b3 100644
--- a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java
+++ b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java
@@ -182,7 +182,7 @@ public class CreateHadoopSequenceFile extends AbstractHadoopProcessor {
session.transfer(flowFile, RELATIONSHIP_SUCCESS);
getLogger().info("Transferred flowfile {} to {}", new Object[]{flowFile, RELATIONSHIP_SUCCESS});
} catch (ProcessException e) {
- getLogger().error("Failed to create Sequence File. Transferring {} to 'failure'", new Object[]{flowFile}, e);
+ getLogger().error("Failed to create Sequence File. Transferring {} to 'failure'", flowFile, e);
session.transfer(flowFile, RELATIONSHIP_FAILURE);
}
diff --git a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/DeleteHDFS.java b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/DeleteHDFS.java
index 6b676f9493..7f9459f03f 100644
--- a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/DeleteHDFS.java
+++ b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/DeleteHDFS.java
@@ -201,7 +201,7 @@ public class DeleteHDFS extends AbstractHadoopProcessor {
session.remove(flowFile);
}
} catch (IOException e) {
- getLogger().error("Error processing delete for flowfile {} due to {}", new Object[]{flowFile, e.getMessage()}, e);
+ getLogger().error("Error processing delete for flowfile {} due to {}", flowFile, e.getMessage(), e);
session.transfer(flowFile, getFailureRelationship());
}
diff --git a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/MoveHDFS.java b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/MoveHDFS.java
index d7a3d13045..33e1fac44c 100644
--- a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/MoveHDFS.java
+++ b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/MoveHDFS.java
@@ -284,7 +284,7 @@ public class MoveHDFS extends AbstractHadoopProcessor {
}
}
} catch (Exception e) {
- getLogger().warn("Could not add to processing queue due to {}", new Object[]{e.getMessage()}, e);
+ getLogger().warn("Could not add to processing queue due to {}", e.getMessage(), e);
} finally {
queueLock.unlock();
}
@@ -470,7 +470,7 @@ public class MoveHDFS extends AbstractHadoopProcessor {
hdfs.setOwner(name, owner, group);
}
} catch (Exception e) {
- getLogger().warn("Could not change owner or group of {} on HDFS due to {}", new Object[]{name, e.getMessage()}, e);
+ getLogger().warn("Could not change owner or group of {} on HDFS due to {}", name, e.getMessage(), e);
}
}
diff --git a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/test/java/org/apache/nifi/processors/hadoop/TestListHDFS.java b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/test/java/org/apache/nifi/processors/hadoop/TestListHDFS.java
index eb84fd33cf..f69016ec76 100644
--- a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/test/java/org/apache/nifi/processors/hadoop/TestListHDFS.java
+++ b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/test/java/org/apache/nifi/processors/hadoop/TestListHDFS.java
@@ -492,7 +492,7 @@ class TestListHDFS {
// check that there are no throwables that are not of JobConf CNFE exceptions
.allMatch(throwable -> throwable instanceof ClassNotFoundException && throwable.getMessage().contains("JobConf")));
verify(mockLogger, never()).error(anyString(), any(Object[].class));
- verify(mockLogger, never()).error(anyString(), any(Object[].class), any(Throwable.class));
+ verify(mockLogger, never()).error(anyString(), any(Object[].class));
// assert that no files were listed
runner.assertAllFlowFilesTransferred(ListHDFS.REL_SUCCESS, 0);
diff --git a/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/PutHBaseJSON.java b/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/PutHBaseJSON.java
index 03e905a586..fb5c8eb420 100644
--- a/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/PutHBaseJSON.java
+++ b/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/PutHBaseJSON.java
@@ -189,7 +189,7 @@ public class PutHBaseJSON extends AbstractPutHBase {
}
});
} catch (final ProcessException pe) {
- getLogger().error("Failed to parse {} as JSON due to {}; routing to failure", new Object[]{flowFile, pe.toString()}, pe);
+ getLogger().error("Failed to parse {} as JSON due to {}; routing to failure", flowFile, pe.toString(), pe);
return null;
}
diff --git a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/hive/streaming/NiFiRecordSerDe.java b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/hive/streaming/NiFiRecordSerDe.java
index 51a06c8db1..c23529923d 100644
--- a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/hive/streaming/NiFiRecordSerDe.java
+++ b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/hive/streaming/NiFiRecordSerDe.java
@@ -157,10 +157,10 @@ public class NiFiRecordSerDe extends AbstractSerDe {
populateRecord(result, record.getValue(field), field, schema);
}
} catch(SerDeException se) {
- log.error("Error [{}] parsing Record [{}].", new Object[]{se.toString(), record}, se);
+ log.error("Error [{}] parsing Record [{}].", se.toString(), record, se);
throw se;
} catch (Exception e) {
- log.error("Error [{}] parsing Record [{}].", new Object[]{e.toString(), record}, e);
+ log.error("Error [{}] parsing Record [{}].", e.toString(), record, e);
throw new SerDeException(e);
}
diff --git a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/AbstractHive3QLProcessor.java b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/AbstractHive3QLProcessor.java
index 4fcce19da1..bbfba32380 100644
--- a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/AbstractHive3QLProcessor.java
+++ b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/AbstractHive3QLProcessor.java
@@ -225,7 +225,7 @@ public abstract class AbstractHive3QLProcessor extends AbstractSessionFactoryPro
}
} catch (SQLException e) {
// Log which attribute/parameter had an error, then rethrow to be handled at the top level
- getLogger().error("Error setting parameter {} to value from {} ({})", new Object[]{parameterIndex, attrName, parameterValue}, e);
+ getLogger().error("Error setting parameter {} to value from {} ({})", parameterIndex, attrName, parameterValue, e);
throw e;
}
}
@@ -286,7 +286,7 @@ public abstract class AbstractHive3QLProcessor extends AbstractSessionFactoryPro
node = new ParseDriver().parse(normalize(query));
} catch (ParseException e) {
// If failed to parse the query, just log a message, but continue.
- getLogger().debug("Failed to parse query: {} due to {}", new Object[]{query, e}, e);
+ getLogger().debug("Failed to parse query: {} due to {}", query, e, e);
return Collections.emptySet();
}
final HashSet tableNames = new HashSet<>();
diff --git a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/PutHive3QL.java b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/PutHive3QL.java
index 9f24248378..6e4b822a61 100644
--- a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/PutHive3QL.java
+++ b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/PutHive3QL.java
@@ -248,7 +248,7 @@ public class PutHive3QL extends AbstractHive3QLProcessor {
tableNames.addAll(findTableNames(hiveQL));
} catch (Exception e) {
// If failed to parse the query, just log a warning message, but continue.
- getLogger().warn("Failed to parse hiveQL: {} due to {}", new Object[]{hiveQL, e}, e);
+ getLogger().warn("Failed to parse hiveQL: {} due to {}", hiveQL, e, e);
}
stmt.setQueryTimeout(context.getProperty(QUERY_TIMEOUT).evaluateAttributeExpressions(flowFile).asInteger());
@@ -276,14 +276,13 @@ public class PutHive3QL extends AbstractHive3QLProcessor {
onFlowFileError = onFlowFileError.andThen((c, i, r, e) -> {
switch (r.destination()) {
case Failure:
- getLogger().error("Failed to update Hive for {} due to {}; routing to failure", new Object[] {i, e}, e);
+ getLogger().error("Failed to update Hive for {} due to {}; routing to failure", i, e, e);
break;
case Retry:
- getLogger().error("Failed to update Hive for {} due to {}; it is possible that retrying the operation will succeed, so routing to retry",
- new Object[] {i, e}, e);
+ getLogger().error("Failed to update Hive for {} due to {}; it is possible that retrying the operation will succeed, so routing to retry", i, e, e);
break;
case Self:
- getLogger().error("Failed to update Hive for {} due to {};", new Object[] {i, e}, e);
+ getLogger().error("Failed to update Hive for {} due to {};", i, e, e);
break;
}
});
diff --git a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/PutHive3Streaming.java b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/PutHive3Streaming.java
index ad5555ebad..520e343d22 100644
--- a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/PutHive3Streaming.java
+++ b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/PutHive3Streaming.java
@@ -517,7 +517,7 @@ public class PutHive3Streaming extends AbstractProcessor {
} else {
log.error(
"Failed to create {} for {} - routing to failure",
- new Object[]{RecordReader.class.getSimpleName(), flowFile},
+ RecordReader.class.getSimpleName(), flowFile,
rrfe
);
session.transfer(flowFile, REL_FAILURE);
@@ -539,14 +539,14 @@ public class PutHive3Streaming extends AbstractProcessor {
flowFile = session.putAllAttributes(flowFile, updateAttributes);
log.error(
"Exception while processing {} - routing to failure",
- new Object[]{flowFile},
+ flowFile,
e
);
session.transfer(flowFile, REL_FAILURE);
}
} catch (DiscontinuedException e) {
// The input FlowFile processing is discontinued. Keep it in the input queue.
- getLogger().warn("Discontinued processing for {} due to {}", new Object[]{flowFile, e}, e);
+ getLogger().warn("Discontinued processing for {} due to {}", flowFile, e, e);
session.transfer(flowFile, Relationship.SELF);
} catch (ConnectionError ce) {
// If we can't connect to the metastore, yield the processor
@@ -579,7 +579,7 @@ public class PutHive3Streaming extends AbstractProcessor {
flowFile = session.putAllAttributes(flowFile, updateAttributes);
log.error(
"Exception while trying to stream {} to hive - routing to failure",
- new Object[]{flowFile},
+ flowFile,
se
);
session.transfer(flowFile, REL_FAILURE);
diff --git a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/SelectHive3QL.java b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/SelectHive3QL.java
index 3dbe43b76e..07b5b8528c 100644
--- a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/SelectHive3QL.java
+++ b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/SelectHive3QL.java
@@ -379,7 +379,7 @@ public class SelectHive3QL extends AbstractHive3QLProcessor {
st.setFetchSize(fetchSize);
} catch (SQLException se) {
// Not all drivers support this, just log the error (at debug level) and move on
- logger.debug("Cannot set fetch size to {} due to {}", new Object[]{fetchSize, se.getLocalizedMessage()}, se);
+ logger.debug("Cannot set fetch size to {} due to {}", fetchSize, se.getLocalizedMessage(), se);
}
}
@@ -455,7 +455,7 @@ public class SelectHive3QL extends AbstractHive3QLProcessor {
attributes.putAll(toQueryTableAttributes(findTableNames(hqlStatement)));
} catch (Exception e) {
// If failed to parse the query, just log a warning message, but continue.
- getLogger().warn("Failed to parse query: {} due to {}", new Object[]{hqlStatement, e}, e);
+ getLogger().warn("Failed to parse query: {} due to {}", hqlStatement, e, e);
}
// Set MIME type on output document and add extension to filename
diff --git a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/UpdateHive3Table.java b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/UpdateHive3Table.java
index 5e00e0904f..632220049c 100644
--- a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/UpdateHive3Table.java
+++ b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/processors/hive/UpdateHive3Table.java
@@ -357,7 +357,7 @@ public class UpdateHive3Table extends AbstractProcessor {
} catch (RecordReaderFactoryException rrfe) {
log.error(
"Failed to create {} for {} - routing to failure",
- new Object[]{RecordReader.class.getSimpleName(), flowFile},
+ RecordReader.class.getSimpleName(), flowFile,
rrfe
);
// Since we are wrapping the exceptions above there should always be a cause
@@ -467,11 +467,11 @@ public class UpdateHive3Table extends AbstractProcessor {
}
} catch (IOException | SQLException e) {
flowFile = session.putAttribute(flowFile, ATTR_OUTPUT_TABLE, tableName);
- log.error("Exception while processing {} - routing to failure", new Object[]{flowFile}, e);
+ log.error("Exception while processing {} - routing to failure", flowFile, e);
session.transfer(flowFile, REL_FAILURE);
} catch (DiscontinuedException e) {
// The input FlowFile processing is discontinued. Keep it in the input queue.
- getLogger().warn("Discontinued processing for {} due to {}", new Object[]{flowFile, e}, e);
+ getLogger().warn("Discontinued processing for {} due to {}", flowFile, e, e);
session.transfer(flowFile, Relationship.SELF);
} catch (Throwable t) {
throw (t instanceof ProcessException) ? (ProcessException) t : new ProcessException(t);
diff --git a/nifi-nar-bundles/nifi-html-bundle/nifi-html-processors/src/main/java/org/apache/nifi/GetHTMLElement.java b/nifi-nar-bundles/nifi-html-bundle/nifi-html-processors/src/main/java/org/apache/nifi/GetHTMLElement.java
index 8346f03fb4..37dab67b24 100644
--- a/nifi-nar-bundles/nifi-html-bundle/nifi-html-processors/src/main/java/org/apache/nifi/GetHTMLElement.java
+++ b/nifi-nar-bundles/nifi-html-bundle/nifi-html-processors/src/main/java/org/apache/nifi/GetHTMLElement.java
@@ -168,7 +168,7 @@ public class GetHTMLElement
doc = parseHTMLDocumentFromFlowfile(flowFile, context, session);
eles = doc.select(context.getProperty(CSS_SELECTOR).evaluateAttributeExpressions(flowFile).getValue());
} catch (final Exception ex) {
- getLogger().error("Failed to extract HTML from {} due to {}; routing to {}", new Object[] {flowFile, ex, REL_INVALID_HTML}, ex);
+ getLogger().error("Failed to extract HTML from {} due to {}; routing to {}", flowFile, ex, REL_INVALID_HTML, ex);
session.transfer(flowFile, REL_INVALID_HTML);
return;
}
diff --git a/nifi-nar-bundles/nifi-html-bundle/nifi-html-processors/src/main/java/org/apache/nifi/ModifyHTMLElement.java b/nifi-nar-bundles/nifi-html-bundle/nifi-html-processors/src/main/java/org/apache/nifi/ModifyHTMLElement.java
index 24f97418a6..76bafa777b 100644
--- a/nifi-nar-bundles/nifi-html-bundle/nifi-html-processors/src/main/java/org/apache/nifi/ModifyHTMLElement.java
+++ b/nifi-nar-bundles/nifi-html-bundle/nifi-html-processors/src/main/java/org/apache/nifi/ModifyHTMLElement.java
@@ -147,7 +147,7 @@ public class ModifyHTMLElement extends AbstractHTMLProcessor {
doc = parseHTMLDocumentFromFlowfile(flowFile, context, session);
eles = doc.select(context.getProperty(CSS_SELECTOR).evaluateAttributeExpressions(flowFile).getValue());
} catch (Exception ex) {
- getLogger().error("Failed to extract HTML from {} due to {}; routing to {}", new Object[] {flowFile, ex.toString(), REL_INVALID_HTML.getName()}, ex);
+ getLogger().error("Failed to extract HTML from {} due to {}; routing to {}", flowFile, ex.toString(), REL_INVALID_HTML.getName(), ex);
session.transfer(flowFile, REL_INVALID_HTML);
return;
}
diff --git a/nifi-nar-bundles/nifi-html-bundle/nifi-html-processors/src/main/java/org/apache/nifi/PutHTMLElement.java b/nifi-nar-bundles/nifi-html-bundle/nifi-html-processors/src/main/java/org/apache/nifi/PutHTMLElement.java
index 0d112cb370..c3d4890443 100644
--- a/nifi-nar-bundles/nifi-html-bundle/nifi-html-processors/src/main/java/org/apache/nifi/PutHTMLElement.java
+++ b/nifi-nar-bundles/nifi-html-bundle/nifi-html-processors/src/main/java/org/apache/nifi/PutHTMLElement.java
@@ -136,7 +136,7 @@ public class PutHTMLElement extends AbstractHTMLProcessor {
doc = parseHTMLDocumentFromFlowfile(flowFile, context, session);
eles = doc.select(context.getProperty(CSS_SELECTOR).evaluateAttributeExpressions(flowFile).getValue());
} catch (Exception ex) {
- getLogger().error("Failed to extract HTML from {} due to {}; routing to {}", new Object[] {flowFile, ex.toString(), REL_INVALID_HTML.getName()}, ex);
+ getLogger().error("Failed to extract HTML from {} due to {}; routing to {}", flowFile, ex.toString(), REL_INVALID_HTML.getName(), ex);
session.transfer(flowFile, REL_INVALID_HTML);
return;
}
diff --git a/nifi-nar-bundles/nifi-influxdb-bundle/nifi-influxdb-processors/src/main/java/org/apache/nifi/processors/influxdb/AbstractInfluxDBProcessor.java b/nifi-nar-bundles/nifi-influxdb-bundle/nifi-influxdb-processors/src/main/java/org/apache/nifi/processors/influxdb/AbstractInfluxDBProcessor.java
index edc413612a..388e3f71f6 100644
--- a/nifi-nar-bundles/nifi-influxdb-bundle/nifi-influxdb-processors/src/main/java/org/apache/nifi/processors/influxdb/AbstractInfluxDBProcessor.java
+++ b/nifi-nar-bundles/nifi-influxdb-bundle/nifi-influxdb-processors/src/main/java/org/apache/nifi/processors/influxdb/AbstractInfluxDBProcessor.java
@@ -120,7 +120,7 @@ public abstract class AbstractInfluxDBProcessor extends AbstractProcessor {
try {
influxDB.set(makeConnection(username, password, influxDbUrl, connectionTimeout));
} catch(Exception e) {
- getLogger().error("Error while getting connection {}", new Object[] { e.getLocalizedMessage() },e);
+ getLogger().error("Error while getting connection {}", e.getLocalizedMessage(), e);
throw new RuntimeException("Error while getting connection " + e.getLocalizedMessage(),e);
}
getLogger().info("InfluxDB connection created for host {}",
diff --git a/nifi-nar-bundles/nifi-influxdb-bundle/nifi-influxdb-processors/src/main/java/org/apache/nifi/processors/influxdb/ExecuteInfluxDBQuery.java b/nifi-nar-bundles/nifi-influxdb-bundle/nifi-influxdb-processors/src/main/java/org/apache/nifi/processors/influxdb/ExecuteInfluxDBQuery.java
index 795cc81060..150380dce4 100644
--- a/nifi-nar-bundles/nifi-influxdb-bundle/nifi-influxdb-processors/src/main/java/org/apache/nifi/processors/influxdb/ExecuteInfluxDBQuery.java
+++ b/nifi-nar-bundles/nifi-influxdb-bundle/nifi-influxdb-processors/src/main/java/org/apache/nifi/processors/influxdb/ExecuteInfluxDBQuery.java
@@ -238,11 +238,11 @@ public class ExecuteInfluxDBQuery extends AbstractInfluxDBProcessor {
outgoingFlowFile = populateErrorAttributes(session, outgoingFlowFile, query, exception.getMessage());
if ( exception.getCause() instanceof SocketTimeoutException ) {
getLogger().error("Failed to read from InfluxDB due SocketTimeoutException to {} and retrying",
- new Object[]{exception.getCause().getLocalizedMessage()}, exception.getCause());
+ exception.getCause().getLocalizedMessage(), exception.getCause());
session.transfer(outgoingFlowFile, REL_RETRY);
} else {
getLogger().error("Failed to read from InfluxDB due to {}",
- new Object[]{exception.getLocalizedMessage()}, exception);
+ exception.getLocalizedMessage(), exception);
session.transfer(outgoingFlowFile, REL_FAILURE);
}
context.yield();
diff --git a/nifi-nar-bundles/nifi-influxdb-bundle/nifi-influxdb-processors/src/main/java/org/apache/nifi/processors/influxdb/PutInfluxDB.java b/nifi-nar-bundles/nifi-influxdb-bundle/nifi-influxdb-processors/src/main/java/org/apache/nifi/processors/influxdb/PutInfluxDB.java
index cfec09d18d..8b850bd0ce 100644
--- a/nifi-nar-bundles/nifi-influxdb-bundle/nifi-influxdb-processors/src/main/java/org/apache/nifi/processors/influxdb/PutInfluxDB.java
+++ b/nifi-nar-bundles/nifi-influxdb-bundle/nifi-influxdb-processors/src/main/java/org/apache/nifi/processors/influxdb/PutInfluxDB.java
@@ -178,17 +178,17 @@ public class PutInfluxDB extends AbstractInfluxDBProcessor {
flowFile = session.putAttribute(flowFile, INFLUX_DB_ERROR_MESSAGE, String.valueOf(exception.getMessage()));
if ( exception.getCause() instanceof SocketTimeoutException ) {
getLogger().error("Failed to insert into influxDB due SocketTimeoutException to {} and retrying",
- new Object[]{exception.getLocalizedMessage()}, exception);
+ exception.getLocalizedMessage(), exception);
session.transfer(flowFile, REL_RETRY);
} else {
getLogger().error("Failed to insert into influxDB due to {}",
- new Object[]{exception.getLocalizedMessage()}, exception);
+ exception.getLocalizedMessage(), exception);
session.transfer(flowFile, REL_FAILURE);
}
context.yield();
} catch (Exception exception) {
getLogger().error("Failed to insert into influxDB due to {}",
- new Object[]{exception.getLocalizedMessage()}, exception);
+ exception.getLocalizedMessage(), exception);
flowFile = session.putAttribute(flowFile, INFLUX_DB_ERROR_MESSAGE, String.valueOf(exception.getMessage()));
session.transfer(flowFile, REL_FAILURE);
context.yield();
diff --git a/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/processors/JMSConsumer.java b/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/processors/JMSConsumer.java
index 141b01d9c7..3c721764ad 100644
--- a/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/processors/JMSConsumer.java
+++ b/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/src/main/java/org/apache/nifi/jms/processors/JMSConsumer.java
@@ -204,8 +204,7 @@ class JMSConsumer extends JMSWorker {
return null;
}
} catch (final MessageConversionException mce) {
- processLog.error("Received a JMS Message [{}] but failed to obtain the content of the message; will acknowledge this message without creating a FlowFile for it.",
- new Object[] {message}, mce);
+ processLog.error("Received a JMS Message [{}] but failed to obtain the content of the message; will acknowledge this message without creating a FlowFile for it.", message, mce);
acknowledge(message, session);
if (errorQueueName != null) {
diff --git a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-2-6-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/ConsumeKafkaRecord_2_6.java b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-2-6-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/ConsumeKafkaRecord_2_6.java
index 1fd4963413..94e0393dd6 100644
--- a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-2-6-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/ConsumeKafkaRecord_2_6.java
+++ b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-2-6-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/ConsumeKafkaRecord_2_6.java
@@ -540,11 +540,9 @@ public class ConsumeKafkaRecord_2_6 extends AbstractProcessor implements KafkaCl
getLogger().warn("Was interrupted while trying to communicate with Kafka with lease {}. "
+ "Will roll back session and discard any partially received data.", lease);
} catch (final KafkaException kex) {
- getLogger().error("Exception while interacting with Kafka so will close the lease {} due to {}",
- new Object[]{lease, kex}, kex);
+ getLogger().error("Exception while interacting with Kafka so will close the lease {} due to {}", lease, kex, kex);
} catch (final Throwable t) {
- getLogger().error("Exception while processing data from kafka so will close the lease {} due to {}",
- new Object[]{lease, t}, t);
+ getLogger().error("Exception while processing data from kafka so will close the lease {} due to {}", lease, t, t);
} finally {
activeLeases.remove(lease);
}
diff --git a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-2-6-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/ConsumeKafka_2_6.java b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-2-6-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/ConsumeKafka_2_6.java
index 2ac988b291..b9e3191272 100644
--- a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-2-6-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/ConsumeKafka_2_6.java
+++ b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-2-6-processors/src/main/java/org/apache/nifi/processors/kafka/pubsub/ConsumeKafka_2_6.java
@@ -483,11 +483,9 @@ public class ConsumeKafka_2_6 extends AbstractProcessor implements KafkaClientCo
getLogger().warn("Was interrupted while trying to communicate with Kafka with lease {}. "
+ "Will roll back session and discard any partially received data.", lease);
} catch (final KafkaException kex) {
- getLogger().error("Exception while interacting with Kafka so will close the lease {} due to {}",
- new Object[]{lease, kex}, kex);
+ getLogger().error("Exception while interacting with Kafka so will close the lease {} due to {}", lease, kex, kex);
} catch (final Throwable t) {
- getLogger().error("Exception while processing data from kafka so will close the lease {} due to {}",
- new Object[]{lease, t}, t);
+ getLogger().error("Exception while processing data from kafka so will close the lease {} due to {}", lease, t, t);
} finally {
activeLeases.remove(lease);
}
diff --git a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-2-6-processors/src/main/java/org/apache/nifi/record/sink/kafka/KafkaRecordSink_2_6.java b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-2-6-processors/src/main/java/org/apache/nifi/record/sink/kafka/KafkaRecordSink_2_6.java
index 3899f7edf9..ab9dd3fa3f 100644
--- a/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-2-6-processors/src/main/java/org/apache/nifi/record/sink/kafka/KafkaRecordSink_2_6.java
+++ b/nifi-nar-bundles/nifi-kafka-bundle/nifi-kafka-2-6-processors/src/main/java/org/apache/nifi/record/sink/kafka/KafkaRecordSink_2_6.java
@@ -222,7 +222,7 @@ public class KafkaRecordSink_2_6 extends AbstractControllerService implements Ka
try {
producer = createProducer(kafkaProperties);
} catch (Exception e) {
- getLogger().error("Could not create Kafka producer due to {}", new Object[]{e.getMessage()}, e);
+ getLogger().error("Could not create Kafka producer due to {}", e.getMessage(), e);
throw new InitializationException(e);
}
}
diff --git a/nifi-nar-bundles/nifi-kudu-bundle/nifi-kudu-processors/src/main/java/org/apache/nifi/processors/kudu/PutKudu.java b/nifi-nar-bundles/nifi-kudu-bundle/nifi-kudu-processors/src/main/java/org/apache/nifi/processors/kudu/PutKudu.java
index e6f9b0ed9b..db73e9d707 100644
--- a/nifi-nar-bundles/nifi-kudu-bundle/nifi-kudu-processors/src/main/java/org/apache/nifi/processors/kudu/PutKudu.java
+++ b/nifi-nar-bundles/nifi-kudu-bundle/nifi-kudu-processors/src/main/java/org/apache/nifi/processors/kudu/PutKudu.java
@@ -510,7 +510,7 @@ public class PutKudu extends AbstractKuduProcessor {
record = recordSet.next();
}
} catch (Exception ex) {
- getLogger().error("Failed to push {} to Kudu", new Object[] {flowFile}, ex);
+ getLogger().error("Failed to push {} to Kudu", flowFile, ex);
flowFileFailures.put(flowFile, ex);
}
}
diff --git a/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/PutMongo.java b/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/PutMongo.java
index f9bc4aacbb..5a2b23f6f2 100644
--- a/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/PutMongo.java
+++ b/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/PutMongo.java
@@ -235,7 +235,7 @@ public class PutMongo extends AbstractMongoProcessor {
session.getProvenanceReporter().send(flowFile, getURI(context));
session.transfer(flowFile, REL_SUCCESS);
} catch (Exception e) {
- logger.error("Failed to insert {} into MongoDB due to {}", new Object[] {flowFile, e}, e);
+ logger.error("Failed to insert {} into MongoDB due to {}", flowFile, e, e);
session.transfer(flowFile, REL_FAILURE);
context.yield();
}
diff --git a/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-services/src/main/java/org/apache/nifi/mongodb/MongoDBControllerService.java b/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-services/src/main/java/org/apache/nifi/mongodb/MongoDBControllerService.java
index dbcc4988f6..3347eedf4f 100644
--- a/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-services/src/main/java/org/apache/nifi/mongodb/MongoDBControllerService.java
+++ b/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-services/src/main/java/org/apache/nifi/mongodb/MongoDBControllerService.java
@@ -98,7 +98,7 @@ public class MongoDBControllerService extends AbstractControllerService implemen
final MongoClientSettings clientSettings = builder.build();
return MongoClients.create(clientSettings);
} catch (Exception e) {
- getLogger().error("Failed to schedule {} due to {}", new Object[] { this.getClass().getName(), e }, e);
+ getLogger().error("Failed to schedule {} due to {}", this.getClass().getName(), e, e);
throw e;
}
}
diff --git a/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-services/src/main/java/org/apache/nifi/mongodb/MongoDBLookupService.java b/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-services/src/main/java/org/apache/nifi/mongodb/MongoDBLookupService.java
index ecf1db1659..a659573413 100644
--- a/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-services/src/main/java/org/apache/nifi/mongodb/MongoDBLookupService.java
+++ b/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-services/src/main/java/org/apache/nifi/mongodb/MongoDBLookupService.java
@@ -148,7 +148,7 @@ public class MongoDBLookupService extends JsonInferenceSchemaRegistryService imp
return Optional.ofNullable(new MapRecord(schema, result));
}
} catch (Exception ex) {
- getLogger().error("Error during lookup {}", new Object[]{ query.toJson() }, ex);
+ getLogger().error("Error during lookup {}", query.toJson(), ex);
throw new LookupFailureException(ex);
}
}
diff --git a/nifi-nar-bundles/nifi-rules-action-handler-bundle/nifi-rules-action-handler-service/src/main/java/org/apache/nifi/rules/handlers/ExpressionHandler.java b/nifi-nar-bundles/nifi-rules-action-handler-bundle/nifi-rules-action-handler-service/src/main/java/org/apache/nifi/rules/handlers/ExpressionHandler.java
index 34a08c37e0..6a69d5eb24 100644
--- a/nifi-nar-bundles/nifi-rules-action-handler-bundle/nifi-rules-action-handler-service/src/main/java/org/apache/nifi/rules/handlers/ExpressionHandler.java
+++ b/nifi-nar-bundles/nifi-rules-action-handler-bundle/nifi-rules-action-handler-service/src/main/java/org/apache/nifi/rules/handlers/ExpressionHandler.java
@@ -101,8 +101,7 @@ public class ExpressionHandler extends AbstractActionHandlerService {
executeSPEL(command, facts);
}
} catch (Exception ex) {
- getLogger().warn("Error occurred when attempting to execute expression. Action: {}, Facts - {}",
- new Object[]{action, facts}, ex);
+ getLogger().warn("Error occurred when attempting to execute expression. Action: {}, Facts - {}", action, facts, ex);
}
}else{
getLogger().warn("Command attribute was not provided. Action: {}, Facts - {}",
diff --git a/nifi-nar-bundles/nifi-rules-action-handler-bundle/nifi-rules-action-handler-service/src/test/java/org/apache/nifi/rules/handlers/MockComponentLog.java b/nifi-nar-bundles/nifi-rules-action-handler-bundle/nifi-rules-action-handler-service/src/test/java/org/apache/nifi/rules/handlers/MockComponentLog.java
index e31c35d6c2..4412a840d0 100644
--- a/nifi-nar-bundles/nifi-rules-action-handler-bundle/nifi-rules-action-handler-service/src/test/java/org/apache/nifi/rules/handlers/MockComponentLog.java
+++ b/nifi-nar-bundles/nifi-rules-action-handler-bundle/nifi-rules-action-handler-service/src/test/java/org/apache/nifi/rules/handlers/MockComponentLog.java
@@ -42,7 +42,7 @@ public class MockComponentLog implements ComponentLog {
}
@Override
- public void warn(String msg, Object[] os) {
+ public void warn(String msg, Object... os) {
warn(msg);
}
@@ -67,7 +67,7 @@ public class MockComponentLog implements ComponentLog {
}
@Override
- public void trace(String msg, Object[] os) {
+ public void trace(String msg, Object... os) {
trace(msg);
}
@@ -117,7 +117,7 @@ public class MockComponentLog implements ComponentLog {
}
@Override
- public void info(String msg, Object[] os) {
+ public void info(String msg, Object... os) {
info(msg);
}
@@ -147,8 +147,8 @@ public class MockComponentLog implements ComponentLog {
}
@Override
- public void error(String msg, Object[] os) {
- error(convertMessage(msg,os));
+ public void error(String msg, Object... os) {
+ error(convertMessage(msg, os));
}
@Override
@@ -172,8 +172,8 @@ public class MockComponentLog implements ComponentLog {
}
@Override
- public void debug(String msg, Object[] os) {
- debug(convertMessage(msg,os));
+ public void debug(String msg, Object... os) {
+ debug(convertMessage(msg, os));
}
@Override
@@ -197,7 +197,7 @@ public class MockComponentLog implements ComponentLog {
}
@Override
- public void log(LogLevel level, String msg, Object[] os) {
+ public void log(LogLevel level, String msg, Object... os) {
}
diff --git a/nifi-nar-bundles/nifi-scripting-bundle/nifi-scripting-processors/src/main/java/org/apache/nifi/processors/script/ScriptedTransformRecord.java b/nifi-nar-bundles/nifi-scripting-bundle/nifi-scripting-processors/src/main/java/org/apache/nifi/processors/script/ScriptedTransformRecord.java
index bd38ee9bbf..2fb32d8189 100644
--- a/nifi-nar-bundles/nifi-scripting-bundle/nifi-scripting-processors/src/main/java/org/apache/nifi/processors/script/ScriptedTransformRecord.java
+++ b/nifi-nar-bundles/nifi-scripting-bundle/nifi-scripting-processors/src/main/java/org/apache/nifi/processors/script/ScriptedTransformRecord.java
@@ -227,10 +227,10 @@ public class ScriptedTransformRecord extends ScriptedRecordProcessor {
final long millis = System.currentTimeMillis() - startMillis;
session.getProvenanceReporter().modifyContent(flowFile, "Transformed " + transformCount + " Records, Dropped " + counts.getDroppedCount() + " Records", millis);
} catch (final ProcessException e) {
- getLogger().error("After processing {} Records, encountered failure when attempting to transform {}", new Object[] {counts.getRecordCount(), flowFile}, e.getCause());
+ getLogger().error("After processing {} Records, encountered failure when attempting to transform {}", counts.getRecordCount(), flowFile, e.getCause());
session.transfer(flowFile, REL_FAILURE);
} catch (final Exception e) {
- getLogger().error("After processing {} Records, encountered failure when attempting to transform {}", new Object[] {counts.getRecordCount(), flowFile}, e);
+ getLogger().error("After processing {} Records, encountered failure when attempting to transform {}", counts.getRecordCount(), flowFile, e);
session.transfer(flowFile, REL_FAILURE);
}
}
diff --git a/nifi-nar-bundles/nifi-scripting-bundle/nifi-scripting-processors/src/main/java/org/apache/nifi/script/ScriptRunnerFactory.java b/nifi-nar-bundles/nifi-scripting-bundle/nifi-scripting-processors/src/main/java/org/apache/nifi/script/ScriptRunnerFactory.java
index a9f6f1fe4e..fc263c3529 100644
--- a/nifi-nar-bundles/nifi-scripting-bundle/nifi-scripting-processors/src/main/java/org/apache/nifi/script/ScriptRunnerFactory.java
+++ b/nifi-nar-bundles/nifi-scripting-bundle/nifi-scripting-processors/src/main/java/org/apache/nifi/script/ScriptRunnerFactory.java
@@ -91,7 +91,7 @@ public class ScriptRunnerFactory {
try {
additionalClasspath.add(modulePath.toURI().toURL());
} catch (MalformedURLException mue) {
- log.warn("{} is not a valid file/folder, ignoring", new Object[]{modulePath.getAbsolutePath()}, mue);
+ log.warn("{} is not a valid file/folder, ignoring", modulePath.getAbsolutePath(), mue);
}
// If the path is a directory, we need to scan for JARs and add them to the classpath
@@ -109,7 +109,7 @@ public class ScriptRunnerFactory {
additionalClasspath.add(jarFile.toURI().toURL());
} catch (MalformedURLException mue) {
- log.warn("{} is not a valid file/folder, ignoring", new Object[]{modulePath.getAbsolutePath()}, mue);
+ log.warn("{} is not a valid file/folder, ignoring", modulePath.getAbsolutePath(), mue);
}
}
} else {
diff --git a/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/GetSolr.java b/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/GetSolr.java
index 70bf4cd13b..ce6e3fd993 100644
--- a/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/GetSolr.java
+++ b/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/GetSolr.java
@@ -288,7 +288,7 @@ public class GetSolr extends SolrProcessor {
return(req.process(getSolrClient()).getResponse().get("uniqueKey").toString());
} catch (SolrServerException | IOException e) {
- getLogger().error("Solr query to retrieve uniqueKey-field failed due to {}", new Object[]{solrQuery.toString(), e}, e);
+ getLogger().error("Solr query to retrieve uniqueKey-field failed due to {}", solrQuery.toString(), e, e);
throw new ProcessException(e);
}
}
@@ -420,12 +420,12 @@ public class GetSolr extends SolrProcessor {
} catch (final SolrServerException | SchemaNotFoundException | IOException e) {
context.yield();
session.rollback();
- logger.error("Failed to execute query {} due to {}", new Object[]{solrQuery.toString(), e}, e);
+ logger.error("Failed to execute query {} due to {}", solrQuery.toString(), e, e);
throw new ProcessException(e);
} catch (final Throwable t) {
context.yield();
session.rollback();
- logger.error("Failed to execute query {} due to {}", new Object[]{solrQuery.toString(), t}, t);
+ logger.error("Failed to execute query {} due to {}", solrQuery.toString(), t, t);
throw t;
}
}
diff --git a/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/QuerySolr.java b/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/QuerySolr.java
index 277a8add21..76686c75ff 100644
--- a/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/QuerySolr.java
+++ b/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/src/main/java/org/apache/nifi/processors/solr/QuerySolr.java
@@ -511,7 +511,7 @@ public class QuerySolr extends SolrProcessor {
flowFileResponse = session.putAttribute(flowFileResponse, EXCEPTION, e.getClass().getName());
flowFileResponse = session.putAttribute(flowFileResponse, EXCEPTION_MESSAGE, e.getMessage());
session.transfer(flowFileResponse, FAILURE);
- logger.error("Failed to execute query {} due to {}. FlowFile will be routed to relationship failure", new Object[]{solrQuery.toString(), e}, e);
+ logger.error("Failed to execute query {} due to {}. FlowFile will be routed to relationship failure", solrQuery.toString(), e, e);
if (flowFileOriginal != null) {
flowFileOriginal = session.penalize(flowFileOriginal);
}
diff --git a/nifi-nar-bundles/nifi-spark-bundle/nifi-livy-processors/src/main/java/org/apache/nifi/processors/livy/ExecuteSparkInteractive.java b/nifi-nar-bundles/nifi-spark-bundle/nifi-livy-processors/src/main/java/org/apache/nifi/processors/livy/ExecuteSparkInteractive.java
index b9edcacfe4..41eaac0152 100644
--- a/nifi-nar-bundles/nifi-spark-bundle/nifi-livy-processors/src/main/java/org/apache/nifi/processors/livy/ExecuteSparkInteractive.java
+++ b/nifi-nar-bundles/nifi-spark-bundle/nifi-livy-processors/src/main/java/org/apache/nifi/processors/livy/ExecuteSparkInteractive.java
@@ -194,7 +194,7 @@ public class ExecuteSparkInteractive extends AbstractProcessor {
// If no code was provided, assume it is in the content of the incoming flow file
code = IOUtils.toString(inputStream, charset);
} catch (IOException ioe) {
- log.error("Error reading input flowfile, penalizing and routing to failure", new Object[]{flowFile, ioe.getMessage()}, ioe);
+ log.error("Error reading input flowfile, penalizing and routing to failure", flowFile, ioe.getMessage(), ioe);
flowFile = session.penalize(flowFile);
session.transfer(flowFile, REL_FAILURE);
return;
@@ -224,7 +224,7 @@ public class ExecuteSparkInteractive extends AbstractProcessor {
}
}
} catch (IOException | SessionManagerException e) {
- log.error("Failure processing flowfile {} due to {}, penalizing and routing to failure", new Object[]{flowFile, e.getMessage()}, e);
+ log.error("Failure processing flowfile {} due to {}, penalizing and routing to failure", flowFile, e.getMessage(), e);
flowFile = session.penalize(flowFile);
session.transfer(flowFile, REL_FAILURE);
}
diff --git a/nifi-nar-bundles/nifi-splunk-bundle/nifi-splunk-processors/src/main/java/org/apache/nifi/processors/splunk/GetSplunk.java b/nifi-nar-bundles/nifi-splunk-bundle/nifi-splunk-processors/src/main/java/org/apache/nifi/processors/splunk/GetSplunk.java
index 8cb12dded7..b575bf6ca4 100644
--- a/nifi-nar-bundles/nifi-splunk-bundle/nifi-splunk-processors/src/main/java/org/apache/nifi/processors/splunk/GetSplunk.java
+++ b/nifi-nar-bundles/nifi-splunk-bundle/nifi-splunk-processors/src/main/java/org/apache/nifi/processors/splunk/GetSplunk.java
@@ -380,7 +380,7 @@ public class GetSplunk extends AbstractProcessor {
try {
context.getStateManager().clear(Scope.CLUSTER);
} catch (IOException e) {
- getLogger().error("Unable to clear processor state due to {}", new Object[] {e.getMessage()}, e);
+ getLogger().error("Unable to clear processor state due to {}", e.getMessage(), e);
}
}
@@ -449,7 +449,7 @@ public class GetSplunk extends AbstractProcessor {
}
} catch (IOException e) {
- getLogger().error("Unable to load data from State Manager due to {}", new Object[] {e.getMessage()}, e);
+ getLogger().error("Unable to load data from State Manager due to {}", e.getMessage(), e);
context.yield();
return;
}
@@ -517,7 +517,7 @@ public class GetSplunk extends AbstractProcessor {
try {
saveState(session, new TimeRange(earliestTime, latestTime));
} catch (IOException e) {
- getLogger().error("Unable to load data from State Manager due to {}", new Object[]{e.getMessage()}, e);
+ getLogger().error("Unable to load data from State Manager due to {}", e.getMessage(), e);
session.rollback();
context.yield();
}
diff --git a/nifi-nar-bundles/nifi-splunk-bundle/nifi-splunk-processors/src/main/java/org/apache/nifi/processors/splunk/PutSplunkHTTP.java b/nifi-nar-bundles/nifi-splunk-bundle/nifi-splunk-processors/src/main/java/org/apache/nifi/processors/splunk/PutSplunkHTTP.java
index 861de32ba2..3ea51637c0 100644
--- a/nifi-nar-bundles/nifi-splunk-bundle/nifi-splunk-processors/src/main/java/org/apache/nifi/processors/splunk/PutSplunkHTTP.java
+++ b/nifi-nar-bundles/nifi-splunk-bundle/nifi-splunk-processors/src/main/java/org/apache/nifi/processors/splunk/PutSplunkHTTP.java
@@ -191,7 +191,7 @@ public class PutSplunkHTTP extends SplunkAPICall {
new Object[] {responseMessage.getStatus(), IOUtils.toString(responseMessage.getContent(), "UTF-8")});
}
} catch (final Exception e) {
- getLogger().error("Error during communication with Splunk: {}", new Object[] {e.getMessage()}, e);
+ getLogger().error("Error during communication with Splunk: {}", e.getMessage(), e);
if (responseMessage != null) {
try {
@@ -264,7 +264,7 @@ public class PutSplunkHTTP extends SplunkAPICall {
parameters.add(URLEncoder.encode(parameter.getKey(), "UTF-8") + '=' + URLEncoder.encode(parameter.getValue(), "UTF-8"));
}
} catch (final UnsupportedEncodingException e) {
- getLogger().error("Could not be initialized because of: {}", new Object[]{e.getMessage()}, e);
+ getLogger().error("Could not be initialized because of: {}", e.getMessage(), e);
throw new ProcessException(e);
}
diff --git a/nifi-nar-bundles/nifi-sql-reporting-bundle/nifi-sql-reporting-tasks/src/main/java/org/apache/nifi/reporting/sql/MetricsEventReportingTask.java b/nifi-nar-bundles/nifi-sql-reporting-bundle/nifi-sql-reporting-tasks/src/main/java/org/apache/nifi/reporting/sql/MetricsEventReportingTask.java
index c3a0baa47e..1f0831e8fe 100644
--- a/nifi-nar-bundles/nifi-sql-reporting-bundle/nifi-sql-reporting-tasks/src/main/java/org/apache/nifi/reporting/sql/MetricsEventReportingTask.java
+++ b/nifi-nar-bundles/nifi-sql-reporting-bundle/nifi-sql-reporting-tasks/src/main/java/org/apache/nifi/reporting/sql/MetricsEventReportingTask.java
@@ -90,7 +90,7 @@ public class MetricsEventReportingTask extends AbstractReportingTask implements
fireRules(context, actionHandler, rulesEngineService, sql);
} catch (Exception e) {
- getLogger().error("Error opening loading rules: {}", new Object[]{e.getMessage()}, e);
+ getLogger().error("Error opening loading rules: {}", e.getMessage(), e);
}
}
diff --git a/nifi-nar-bundles/nifi-sql-reporting-bundle/nifi-sql-reporting-tasks/src/main/java/org/apache/nifi/reporting/sql/MetricsSqlQueryService.java b/nifi-nar-bundles/nifi-sql-reporting-bundle/nifi-sql-reporting-tasks/src/main/java/org/apache/nifi/reporting/sql/MetricsSqlQueryService.java
index 09b3be0f4a..867b9038bf 100644
--- a/nifi-nar-bundles/nifi-sql-reporting-bundle/nifi-sql-reporting-tasks/src/main/java/org/apache/nifi/reporting/sql/MetricsSqlQueryService.java
+++ b/nifi-nar-bundles/nifi-sql-reporting-bundle/nifi-sql-reporting-tasks/src/main/java/org/apache/nifi/reporting/sql/MetricsSqlQueryService.java
@@ -128,7 +128,7 @@ public class MetricsSqlQueryService implements MetricsQueryService {
try {
recordSet = new ResultSetRecordSet(rs, writerSchema, defaultPrecision, defaultScale);
} catch (final SQLException e) {
- getLogger().error("Error creating record set from query results due to {}", new Object[]{e.getMessage()}, e);
+ getLogger().error("Error creating record set from query results due to {}", e.getMessage(), e);
}
return recordSet;
diff --git a/nifi-nar-bundles/nifi-sql-reporting-bundle/nifi-sql-reporting-tasks/src/main/java/org/apache/nifi/reporting/sql/QueryNiFiReportingTask.java b/nifi-nar-bundles/nifi-sql-reporting-bundle/nifi-sql-reporting-tasks/src/main/java/org/apache/nifi/reporting/sql/QueryNiFiReportingTask.java
index 3d1b0b1a26..78c7735e14 100644
--- a/nifi-nar-bundles/nifi-sql-reporting-bundle/nifi-sql-reporting-tasks/src/main/java/org/apache/nifi/reporting/sql/QueryNiFiReportingTask.java
+++ b/nifi-nar-bundles/nifi-sql-reporting-bundle/nifi-sql-reporting-tasks/src/main/java/org/apache/nifi/reporting/sql/QueryNiFiReportingTask.java
@@ -103,7 +103,7 @@ public class QueryNiFiReportingTask extends AbstractReportingTask implements Que
try {
recordSet = metricsQueryService.getResultSetRecordSet(queryResult);
} catch (final Exception e) {
- getLogger().error("Error creating record set from query results due to {}", new Object[]{e.getMessage()}, e);
+ getLogger().error("Error creating record set from query results due to {}", e.getMessage(), e);
return;
}
@@ -116,7 +116,7 @@ public class QueryNiFiReportingTask extends AbstractReportingTask implements Que
attributes.put("reporting.task.type", this.getClass().getSimpleName());
recordSinkService.sendData(recordSet, attributes, context.getProperty(QueryMetricsUtil.INCLUDE_ZERO_RECORD_RESULTS).asBoolean());
} catch (Exception e) {
- getLogger().error("Error during transmission of query results due to {}", new Object[]{e.getMessage()}, e);
+ getLogger().error("Error during transmission of query results due to {}", e.getMessage(), e);
return;
} finally {
metricsQueryService.closeQuietly(queryResult);
@@ -124,7 +124,7 @@ public class QueryNiFiReportingTask extends AbstractReportingTask implements Que
final long elapsedMillis = stopWatch.getElapsed(TimeUnit.MILLISECONDS);
getLogger().debug("Successfully queried and sent in {} millis", elapsedMillis);
} catch (Exception e) {
- getLogger().error("Error processing the query due to {}", new Object[]{e.getMessage()}, e);
+ getLogger().error("Error processing the query due to {}", e.getMessage(), e);
}
}
}
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractExecuteSQL.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractExecuteSQL.java
index a76b2386be..0816bbf163 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractExecuteSQL.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractExecuteSQL.java
@@ -272,7 +272,7 @@ public abstract class AbstractExecuteSQL extends AbstractProcessor {
st.setFetchSize(fetchSize);
} catch (SQLException se) {
// Not all drivers support this, just log the error (at debug level) and move on
- logger.debug("Cannot set fetch size to {} due to {}", new Object[]{fetchSize, se.getLocalizedMessage()}, se);
+ logger.debug("Cannot set fetch size to {} due to {}", fetchSize, se.getLocalizedMessage(), se);
}
}
st.setQueryTimeout(queryTimeout); // timeout in seconds
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractQueryDatabaseTable.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractQueryDatabaseTable.java
index 03df6297af..b4177b964e 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractQueryDatabaseTable.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractQueryDatabaseTable.java
@@ -321,7 +321,7 @@ public abstract class AbstractQueryDatabaseTable extends AbstractDatabaseFetchPr
st.setFetchSize(fetchSize);
} catch (SQLException se) {
// Not all drivers support this, just log the error (at debug level) and move on
- logger.debug("Cannot set fetch size to {} due to {}", new Object[]{fetchSize, se.getLocalizedMessage()}, se);
+ logger.debug("Cannot set fetch size to {} due to {}", fetchSize, se.getLocalizedMessage(), se);
}
}
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertJSONToSQL.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertJSONToSQL.java
index 7af19faf00..1a232f330c 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertJSONToSQL.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertJSONToSQL.java
@@ -327,7 +327,7 @@ public class ConvertJSONToSQL extends AbstractProcessor {
}
});
} catch (ProcessException e) {
- getLogger().error("Failed to convert {} into a SQL statement due to {}; routing to failure", new Object[]{flowFile, e.toString()}, e);
+ getLogger().error("Failed to convert {} into a SQL statement due to {}; routing to failure", flowFile, e.toString(), e);
session.transfer(flowFile, REL_FAILURE);
return;
}
@@ -345,7 +345,7 @@ public class ConvertJSONToSQL extends AbstractProcessor {
}
});
} catch (final ProcessException pe) {
- getLogger().error("Failed to parse {} as JSON due to {}; routing to failure", new Object[] {flowFile, pe.toString()}, pe);
+ getLogger().error("Failed to parse {} as JSON due to {}; routing to failure", flowFile, pe.toString(), pe);
session.transfer(flowFile, REL_FAILURE);
return;
}
@@ -398,7 +398,7 @@ public class ConvertJSONToSQL extends AbstractProcessor {
}
} catch (final ProcessException pe) {
getLogger().error("Failed to convert {} to a SQL {} statement due to {}; routing to failure",
- new Object[] { flowFile, statementType, pe.toString() }, pe);
+ flowFile, statementType, pe.toString(), pe);
session.remove(created);
session.transfer(flowFile, REL_FAILURE);
return;
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EnforceOrder.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EnforceOrder.java
index 46e5cebfcb..879a9d30c0 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EnforceOrder.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EnforceOrder.java
@@ -521,7 +521,7 @@ public class EnforceOrder extends AbstractProcessor {
private void transferToFailure(final FlowFile flowFile, final String message, final Throwable cause) {
if (cause != null) {
- getLogger().warn(message + " {}", new Object[]{flowFile}, cause);
+ getLogger().warn(message + " {}", flowFile, cause);
} else {
getLogger().warn(message + " {}", new Object[]{flowFile});
}
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateJsonPath.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateJsonPath.java
index 0cfb46a642..e72352c306 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateJsonPath.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateJsonPath.java
@@ -302,7 +302,7 @@ public class EvaluateJsonPath extends AbstractJsonPathProcessor {
} catch (final PathNotFoundException e) {
if (pathNotFound.equals(PATH_NOT_FOUND_WARN)) {
logger.warn("FlowFile {} could not find path {} for attribute key {}.",
- new Object[]{flowFile.getId(), jsonPathExp.getPath(), jsonPathAttrKey}, e);
+ flowFile.getId(), jsonPathExp.getPath(), jsonPathAttrKey, e);
}
if (destinationIsAttribute) {
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FetchFile.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FetchFile.java
index 8ac9d1896e..e16eec7189 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FetchFile.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FetchFile.java
@@ -242,7 +242,7 @@ public class FetchFile extends AbstractProcessor {
} catch (Exception e) {
getLogger().error("Could not fetch file {} from file system for {} because Completion Strategy is configured to move the original file to {}, "
+ "but that directory does not exist and could not be created due to: {}",
- new Object[] {file, flowFile, targetDir, e.getMessage()}, e);
+ file, flowFile, targetDir, e.getMessage(), e);
session.transfer(flowFile, REL_FAILURE);
return;
}
@@ -267,7 +267,7 @@ public class FetchFile extends AbstractProcessor {
try (final FileInputStream fis = new FileInputStream(file)) {
flowFile = session.importFrom(fis, flowFile);
} catch (final IOException ioe) {
- getLogger().error("Could not fetch file {} from file system for {} due to {}; routing to failure", new Object[] {file, flowFile, ioe.toString()}, ioe);
+ getLogger().error("Could not fetch file {} from file system for {} due to {}; routing to failure", file, flowFile, ioe.toString(), ioe);
session.transfer(session.penalize(flowFile), REL_FAILURE);
return;
}
@@ -328,7 +328,7 @@ public class FetchFile extends AbstractProcessor {
// Handle completion failures
if (completionFailureException != null) {
getLogger().warn("Successfully fetched the content from {} for {} but failed to perform Completion Action due to {}; routing to success",
- new Object[] {file, flowFile, completionFailureException}, completionFailureException);
+ file, flowFile, completionFailureException, completionFailureException);
}
}
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FetchFileTransfer.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FetchFileTransfer.java
index e4df17383b..227904282d 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FetchFileTransfer.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FetchFileTransfer.java
@@ -186,7 +186,7 @@ public abstract class FetchFileTransfer extends AbstractProcessor {
try {
wrapper.getFileTransfer().close();
} catch (final IOException ioe) {
- getLogger().warn("Failed to close Idle Connection due to {}", new Object[] {ioe}, ioe);
+ getLogger().warn("Failed to close Idle Connection due to {}", ioe, ioe);
}
}
}
@@ -275,7 +275,7 @@ public abstract class FetchFileTransfer extends AbstractProcessor {
return;
} catch (final ProcessException | IOException e) {
getLogger().error("Failed to fetch content for {} from filename {} on remote host {}:{} due to {}; routing to comms.failure",
- new Object[]{flowFile, filename, host, port, e.toString()}, e);
+ flowFile, filename, host, port, e.toString(), e);
session.transfer(session.penalize(flowFile), REL_COMMS_FAILURE);
cleanupTransfer(transfer, true, transferQueue, host, port);
return;
@@ -325,7 +325,7 @@ public abstract class FetchFileTransfer extends AbstractProcessor {
try {
transfer.close();
} catch (final IOException e) {
- getLogger().warn("Failed to close connection to {}:{} due to {}", new Object[]{host, port, e.getMessage()}, e);
+ getLogger().warn("Failed to close connection to {}:{} due to {}", host, port, e.getMessage(), e);
}
} else {
getLogger().debug("Returning FileTransfer to pool...");
@@ -342,7 +342,7 @@ public abstract class FetchFileTransfer extends AbstractProcessor {
// file doesn't exist -- effectively the same as removing it. Move on.
} catch (final IOException ioe) {
getLogger().warn("Successfully fetched the content for {} from {}:{}{} but failed to remove the remote file due to {}",
- new Object[]{flowFile, host, port, filename, ioe}, ioe);
+ flowFile, host, port, filename, ioe, ioe);
}
} else if (COMPLETION_MOVE.getValue().equalsIgnoreCase(completionStrategy)) {
final String targetDir = context.getProperty(MOVE_DESTINATION_DIR).evaluateAttributeExpressions(flowFile).getValue();
@@ -360,7 +360,7 @@ public abstract class FetchFileTransfer extends AbstractProcessor {
} catch (final IOException ioe) {
getLogger().warn("Successfully fetched the content for {} from {}:{}{} but failed to rename the remote file due to {}",
- new Object[]{flowFile, host, port, filename, ioe}, ioe);
+ flowFile, host, port, filename, ioe, ioe);
}
}
}
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateTableFetch.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateTableFetch.java
index 2b2731457a..97e3bfe4b6 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateTableFetch.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateTableFetch.java
@@ -559,7 +559,7 @@ public class GenerateTableFetch extends AbstractDatabaseFetchProcessor {
} catch (final ProcessException pe) {
// Log the cause of the ProcessException if it is available
Throwable t = (pe.getCause() == null ? pe : pe.getCause());
- logger.error("Error during processing: {}", new Object[]{t.getMessage()}, t);
+ logger.error("Error during processing: {}", t.getMessage(), t);
session.rollback();
context.yield();
}
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFileTransfer.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFileTransfer.java
index da362366ab..6afbc41aa2 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFileTransfer.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFileTransfer.java
@@ -223,7 +223,7 @@ public abstract class GetFileTransfer extends AbstractProcessor {
return;
} catch (final FlowFileAccessException e) {
context.yield();
- logger.error("Unable to retrieve file {} due to {}", new Object[]{file.getFullPathFileName(), e.getCause()}, e);
+ logger.error("Unable to retrieve file {} due to {}", file.getFullPathFileName(), e.getCause(), e);
try {
transfer.close();
@@ -261,7 +261,7 @@ public abstract class GetFileTransfer extends AbstractProcessor {
try {
fileTransfer.deleteFile(receivedFlowFile, null, remoteFilename);
} catch (final IOException e) {
- getLogger().error("Failed to remove remote file {} due to {}. This file may be duplicated in a subsequent run", new Object[] {remoteFilename, e}, e);
+ getLogger().error("Failed to remove remote file {} due to {}. This file may be duplicated in a subsequent run", remoteFilename, e, e);
}
}
}
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpRequest.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpRequest.java
index 2dd47c6bb4..390db0b4d2 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpRequest.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpRequest.java
@@ -631,7 +631,7 @@ public class HandleHttpRequest extends AbstractProcessor {
try {
part.delete();
} catch (Exception e) {
- getLogger().error("Couldn't delete underlying storage for {}", new Object[]{part}, e);
+ getLogger().error("Couldn't delete underlying storage for {}", part, e);
}
}
}
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListDatabaseTables.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListDatabaseTables.java
index 64d70c0181..998ecf1a71 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListDatabaseTables.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListDatabaseTables.java
@@ -468,7 +468,7 @@ public class ListDatabaseTables extends AbstractProcessor {
try {
recordWriter.close();
} catch (IOException e) {
- logger.error("Failed to write listing as Records due to {}", new Object[] {e}, e);
+ logger.error("Failed to write listing as Records due to {}", e, e);
}
session.remove(flowFile);
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListFile.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListFile.java
index 340d545fc8..38ecdc49ad 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListFile.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListFile.java
@@ -617,7 +617,7 @@ public class ListFile extends AbstractListProcessor {
getLogger().debug("The following file is not readable: {}", new Object[]{path.toString()});
return FileVisitResult.SKIP_SUBTREE;
} else {
- getLogger().error("Error during visiting file {}: {}", new Object[]{path.toString(), e.getMessage()}, e);
+ getLogger().error("Error during visiting file {}: {}", path.toString(), e.getMessage(), e);
return FileVisitResult.TERMINATE;
}
}
@@ -625,7 +625,7 @@ public class ListFile extends AbstractListProcessor {
@Override
public FileVisitResult postVisitDirectory(final Path dir, final IOException e) {
if (e != null) {
- getLogger().error("Error during visiting directory {}: {}", new Object[]{dir.toString(), e.getMessage()}, e);
+ getLogger().error("Error during visiting directory {}: {}", dir.toString(), e.getMessage(), e);
}
return FileVisitResult.CONTINUE;
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java
index 5b4afdca81..6f40029fbd 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java
@@ -937,7 +937,7 @@ public class MergeContent extends BinFiles {
out.closeEntry();
unmerged.remove(flowFile);
} catch (ZipException e) {
- getLogger().error("Encountered exception merging {}", new Object[] {flowFile}, e);
+ getLogger().error("Encountered exception merging {}", flowFile, e);
}
}
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Notify.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Notify.java
index 1949ec32e4..9406daeff7 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Notify.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Notify.java
@@ -230,7 +230,7 @@ public class Notify extends AbstractProcessor {
try {
delta = Integer.parseInt(deltaStr);
} catch (final NumberFormatException e) {
- logger.error("Failed to calculate delta for FlowFile {} due to {}", new Object[] {flowFile, e}, e);
+ logger.error("Failed to calculate delta for FlowFile {} due to {}", flowFile, e, e);
session.transfer(session.putAttribute(flowFile, NOTIFIED_ATTRIBUTE_NAME, String.valueOf(false)), REL_FAILURE);
continue;
}
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutRecord.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutRecord.java
index f51becee2b..f0c1eca797 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutRecord.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutRecord.java
@@ -161,27 +161,27 @@ public class PutRecord extends AbstractProcessor {
}
} catch (RetryableIOException rioe) {
- getLogger().warn("Error during transmission of records due to {}, routing to retry", new Object[]{rioe.getMessage()}, rioe);
+ getLogger().warn("Error during transmission of records due to {}, routing to retry", rioe.getMessage(), rioe);
session.transfer(flowFile, REL_RETRY);
return;
} catch (SchemaNotFoundException snfe) {
throw new ProcessException("Error determining schema of flowfile records: " + snfe.getMessage(), snfe);
} catch (MalformedRecordException e) {
- getLogger().error("Error reading records from {} due to {}, routing to failure", new Object[]{flowFile, e.getMessage()}, e);
+ getLogger().error("Error reading records from {} due to {}, routing to failure", flowFile, e.getMessage(), e);
session.penalize(flowFile);
session.transfer(flowFile, REL_FAILURE);
return;
} catch (IOException ioe) {
// The cause might be a MalformedRecordException (RecordReader wraps it in an IOException), send to failure in that case
if (ioe.getCause() instanceof MalformedRecordException) {
- getLogger().error("Error reading records from {} due to {}, routing to failure", new Object[]{flowFile, ioe.getMessage()}, ioe);
+ getLogger().error("Error reading records from {} due to {}, routing to failure", flowFile, ioe.getMessage(), ioe);
session.penalize(flowFile);
session.transfer(flowFile, REL_FAILURE);
return;
}
throw new ProcessException("Error reading from flowfile input stream: " + ioe.getMessage(), ioe);
} catch (Exception e) {
- getLogger().error("Error during transmission of records due to {}, routing to failure", new Object[]{e.getMessage()}, e);
+ getLogger().error("Error during transmission of records due to {}, routing to failure", e.getMessage(), e);
session.transfer(flowFile, REL_FAILURE);
return;
}
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java
index 744cb39443..7051a4026f 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java
@@ -368,7 +368,7 @@ public class ReplaceText extends AbstractProcessor {
session.transfer(flowFile, REL_FAILURE);
return;
} catch (IllegalAttributeException | AttributeExpressionLanguageException e) {
- logger.warn("Transferred {} to 'failure' due to {}", new Object[] { flowFile, e.toString() }, e);
+ logger.warn("Transferred {} to 'failure' due to {}", flowFile, e.toString(), e);
session.transfer(flowFile, REL_FAILURE);
return;
}
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SampleRecord.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SampleRecord.java
index 8cbdb63334..faeabbf05a 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SampleRecord.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SampleRecord.java
@@ -285,7 +285,7 @@ public class SampleRecord extends AbstractProcessor {
attributes.put(CoreAttributes.MIME_TYPE.key(), recordSetWriter.getMimeType());
attributes.putAll(writeResult.getAttributes());
} catch (Exception e) {
- getLogger().error("Error during transmission of records due to {}, routing to failure", new Object[]{e.getMessage()}, e);
+ getLogger().error("Error during transmission of records due to {}, routing to failure", e.getMessage(), e);
session.transfer(flowFile, REL_FAILURE);
session.remove(sampledFlowFile);
return;
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TailFile.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TailFile.java
index 11463bf27f..1c95368c9f 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TailFile.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TailFile.java
@@ -799,7 +799,7 @@ public class TailFile extends AbstractProcessor {
cleanup(context);
tfo.setState(new TailFileState(filename, file, fileChannel, position, timestamp, file.length(), checksum, tfo.getState().getBuffer()));
} catch (final IOException ioe) {
- getLogger().error("Attempted to position Reader at current position in file {} but failed to do so due to {}", new Object[]{file, ioe.toString()}, ioe);
+ getLogger().error("Attempted to position Reader at current position in file {} but failed to do so due to {}", file, ioe.toString(), ioe);
context.yield();
return;
}
@@ -983,11 +983,11 @@ public class TailFile extends AbstractProcessor {
try {
reader.position(newPosition);
} catch (IOException ex) {
- getLogger().warn("Couldn't reposition the reader for {} due to {}", new Object[]{ file, ex }, ex);
+ getLogger().warn("Couldn't reposition the reader for {} due to {}", file, ex, ex);
try {
reader.close();
} catch (IOException ex2) {
- getLogger().warn("Failed to close reader for {} due to {}", new Object[]{ file, ex2 }, ex2);
+ getLogger().warn("Failed to close reader for {} due to {}", file, ex2, ex2);
}
}
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UpdateDatabaseTable.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UpdateDatabaseTable.java
index c161c21443..32e73ae4c1 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UpdateDatabaseTable.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UpdateDatabaseTable.java
@@ -353,7 +353,7 @@ public class UpdateDatabaseTable extends AbstractProcessor {
} catch (ProcessException rrfe) {
log.error(
"Failed to create {} for {} - routing to failure",
- new Object[]{RecordReader.class.getSimpleName(), flowFile},
+ RecordReader.class.getSimpleName(), flowFile,
rrfe
);
// Since we are wrapping the exceptions above there should always be a cause
@@ -447,11 +447,11 @@ public class UpdateDatabaseTable extends AbstractProcessor {
}
} catch (IOException | SQLException e) {
flowFile = session.putAttribute(flowFile, ATTR_OUTPUT_TABLE, tableName);
- log.error("Exception while processing {} - routing to failure", new Object[]{flowFile}, e);
+ log.error("Exception while processing {} - routing to failure", flowFile, e);
session.transfer(flowFile, REL_FAILURE);
} catch (DiscontinuedException e) {
// The input FlowFile processing is discontinued. Keep it in the input queue.
- getLogger().warn("Discontinued processing for {} due to {}", new Object[]{flowFile, e}, e);
+ getLogger().warn("Discontinued processing for {} due to {}", flowFile, e, e);
session.transfer(flowFile, Relationship.SELF);
} catch (Throwable t) {
throw (t instanceof ProcessException) ? (ProcessException) t : new ProcessException(t);
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateCsv.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateCsv.java
index ccff266de5..a9d48555bb 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateCsv.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateCsv.java
@@ -516,7 +516,7 @@ public class ValidateCsv extends AbstractProcessor {
valid.set(false);
if(isWholeFFValidation) {
validationError.set(e.getLocalizedMessage());
- logger.debug("Failed to validate {} against schema due to {}; routing to 'invalid'", new Object[]{flowFile}, e);
+ logger.debug("Failed to validate {} against schema due to {}; routing to 'invalid'", flowFile, e);
break;
} else {
// we append the invalid line to the flow file that will be routed to invalid relationship
@@ -544,7 +544,7 @@ public class ValidateCsv extends AbstractProcessor {
} catch (final IOException e) {
valid.set(false);
- logger.error("Failed to validate {} against schema due to {}", new Object[]{flowFile}, e);
+ logger.error("Failed to validate {} against schema due to {}", flowFile, e);
}
}
});
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Wait.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Wait.java
index ba674d747a..8aec9f1be5 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Wait.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Wait.java
@@ -473,14 +473,14 @@ public class Wait extends AbstractProcessor {
targetCount = Long.valueOf(context.getProperty(TARGET_SIGNAL_COUNT).evaluateAttributeExpressions(flowFile).getValue());
} catch (final NumberFormatException e) {
transferToFailure.accept(flowFile);
- logger.error("Failed to parse targetCount when processing {} due to {}", new Object[] {flowFile, e}, e);
+ logger.error("Failed to parse targetCount when processing {} due to {}", flowFile, e, e);
continue;
}
try {
releasableFlowFileCount = Integer.valueOf(context.getProperty(RELEASABLE_FLOWFILE_COUNT).evaluateAttributeExpressions(flowFile).getValue());
} catch (final NumberFormatException e) {
transferToFailure.accept(flowFile);
- logger.error("Failed to parse releasableFlowFileCount when processing {} due to {}", new Object[] {flowFile, e}, e);
+ logger.error("Failed to parse releasableFlowFileCount when processing {} due to {}", flowFile, e, e);
continue;
}
}
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FTPTransfer.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FTPTransfer.java
index 1cd28227d5..03f740e2d4 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FTPTransfer.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/FTPTransfer.java
@@ -183,7 +183,7 @@ public class FTPTransfer implements FileTransfer {
client.disconnect();
}
} catch (final Exception ex) {
- logger.warn("Failed to close FTPClient due to {}", new Object[] {ex.toString()}, ex);
+ logger.warn("Failed to close FTPClient due to {}", ex.toString(), ex);
}
client = null;
}
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/SFTPTransfer.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/SFTPTransfer.java
index 2a06be5845..01fdf821a0 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/SFTPTransfer.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/SFTPTransfer.java
@@ -658,7 +658,7 @@ public class SFTPTransfer implements FileTransfer {
sftpClient.close();
}
} catch (final Exception ex) {
- logger.warn("Failed to close SFTPClient due to {}", new Object[] {ex.toString()}, ex);
+ logger.warn("Failed to close SFTPClient due to {}", ex.toString(), ex);
}
sftpClient = null;
@@ -667,7 +667,7 @@ public class SFTPTransfer implements FileTransfer {
sshClient.disconnect();
}
} catch (final Exception ex) {
- logger.warn("Failed to close SSHClient due to {}", new Object[] {ex.toString()}, ex);
+ logger.warn("Failed to close SSHClient due to {}", ex.toString(), ex);
}
sshClient = null;
}
diff --git a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_2-client-service-bundle/nifi-hbase_2-client-service/src/main/java/org/apache/nifi/hbase/HBase_2_ListLookupService.java b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_2-client-service-bundle/nifi-hbase_2-client-service/src/main/java/org/apache/nifi/hbase/HBase_2_ListLookupService.java
index 1f9ebcaf62..a7405f5f8b 100644
--- a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_2-client-service-bundle/nifi-hbase_2-client-service/src/main/java/org/apache/nifi/hbase/HBase_2_ListLookupService.java
+++ b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_2-client-service-bundle/nifi-hbase_2-client-service/src/main/java/org/apache/nifi/hbase/HBase_2_ListLookupService.java
@@ -90,7 +90,7 @@ public class HBase_2_ListLookupService extends AbstractHBaseLookupService implem
return Optional.empty();
}
} catch (IOException e) {
- getLogger().error("Error occurred loading {}", new Object[] { coordinates.get("rowKey") }, e);
+ getLogger().error("Error occurred loading {}", coordinates.get("rowKey"), e);
throw new LookupFailureException(e);
}
}
diff --git a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_2-client-service-bundle/nifi-hbase_2-client-service/src/main/java/org/apache/nifi/hbase/HBase_2_RecordLookupService.java b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_2-client-service-bundle/nifi-hbase_2-client-service/src/main/java/org/apache/nifi/hbase/HBase_2_RecordLookupService.java
index 51575436b6..b61cfc6fca 100644
--- a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_2-client-service-bundle/nifi-hbase_2-client-service/src/main/java/org/apache/nifi/hbase/HBase_2_RecordLookupService.java
+++ b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_2-client-service-bundle/nifi-hbase_2-client-service/src/main/java/org/apache/nifi/hbase/HBase_2_RecordLookupService.java
@@ -67,7 +67,7 @@ public class HBase_2_RecordLookupService extends AbstractHBaseLookupService impl
return Optional.empty();
}
} catch (IOException e) {
- getLogger().error("Error occurred loading {}", new Object[] { coordinates.get("rowKey") }, e);
+ getLogger().error("Error occurred loading {}", coordinates.get("rowKey"), e);
throw new LookupFailureException(e);
}
}
diff --git a/nifi-nar-bundles/nifi-standard-services/nifi-lookup-services-bundle/nifi-lookup-services/src/main/java/org/apache/nifi/lookup/maxmind/IPLookupService.java b/nifi-nar-bundles/nifi-standard-services/nifi-lookup-services-bundle/nifi-lookup-services/src/main/java/org/apache/nifi/lookup/maxmind/IPLookupService.java
index bd93a1e7ee..0e4b9d98aa 100644
--- a/nifi-nar-bundles/nifi-standard-services/nifi-lookup-services-bundle/nifi-lookup-services/src/main/java/org/apache/nifi/lookup/maxmind/IPLookupService.java
+++ b/nifi-nar-bundles/nifi-standard-services/nifi-lookup-services-bundle/nifi-lookup-services/src/main/java/org/apache/nifi/lookup/maxmind/IPLookupService.java
@@ -251,7 +251,7 @@ public class IPLookupService extends AbstractControllerService implements Record
inetAddress = InetAddress.getByName(ipAddress);
} catch (final IOException ioe) {
getLogger().warn("Could not resolve the IP for value '{}'. This is usually caused by issue resolving the appropriate DNS record or " +
- "providing the service with an invalid IP address", new Object[] {coordinates}, ioe);
+ "providing the service with an invalid IP address", coordinates, ioe);
return Optional.empty();
}
diff --git a/nifi-nar-bundles/nifi-stateful-analysis-bundle/nifi-stateful-analysis-processors/src/main/java/org/apache/nifi/processors/stateful/analysis/AttributeRollingWindow.java b/nifi-nar-bundles/nifi-stateful-analysis-bundle/nifi-stateful-analysis-processors/src/main/java/org/apache/nifi/processors/stateful/analysis/AttributeRollingWindow.java
index 8aafb97927..ba1bd11182 100644
--- a/nifi-nar-bundles/nifi-stateful-analysis-bundle/nifi-stateful-analysis-processors/src/main/java/org/apache/nifi/processors/stateful/analysis/AttributeRollingWindow.java
+++ b/nifi-nar-bundles/nifi-stateful-analysis-bundle/nifi-stateful-analysis-processors/src/main/java/org/apache/nifi/processors/stateful/analysis/AttributeRollingWindow.java
@@ -179,7 +179,7 @@ public class AttributeRollingWindow extends AbstractProcessor {
}
} catch (Exception e) {
- getLogger().error("Ran into an error while processing {}.", new Object[] { flowFile}, e);
+ getLogger().error("Ran into an error while processing {}.", flowFile, e);
session.transfer(flowFile, REL_FAILURE);
}
}
@@ -189,7 +189,7 @@ public class AttributeRollingWindow extends AbstractProcessor {
try {
state = new HashMap<>(session.getState(SCOPE).toMap());
} catch (IOException e) {
- getLogger().error("Failed to get the initial state when processing {}; transferring FlowFile back to its incoming queue", new Object[]{flowFile}, e);
+ getLogger().error("Failed to get the initial state when processing {}; transferring FlowFile back to its incoming queue", flowFile, e);
session.transfer(flowFile);
context.yield();
return;
@@ -237,7 +237,7 @@ public class AttributeRollingWindow extends AbstractProcessor {
session.setState(state, SCOPE);
} catch (IOException e) {
getLogger().error("Failed to set the state after successfully processing {} due a failure when setting the state. Transferring to '{}'",
- new Object[]{flowFile, REL_FAILED_SET_STATE.getName()}, e);
+ flowFile, REL_FAILED_SET_STATE.getName(), e);
session.transfer(flowFile, REL_FAILED_SET_STATE);
context.yield();
@@ -261,7 +261,7 @@ public class AttributeRollingWindow extends AbstractProcessor {
try {
state = new HashMap<>(session.getState(SCOPE).toMap());
} catch (IOException e) {
- getLogger().error("Failed to get the initial state when processing {}; transferring FlowFile back to its incoming queue", new Object[]{flowFile}, e);
+ getLogger().error("Failed to get the initial state when processing {}; transferring FlowFile back to its incoming queue", flowFile, e);
session.transfer(flowFile);
context.yield();
return;
@@ -353,7 +353,7 @@ public class AttributeRollingWindow extends AbstractProcessor {
try {
session.setState(state, SCOPE);
} catch (IOException e) {
- getLogger().error("Failed to get the initial state when processing {}; transferring FlowFile back to its incoming queue", new Object[]{flowFile}, e);
+ getLogger().error("Failed to get the initial state when processing {}; transferring FlowFile back to its incoming queue", flowFile, e);
session.transfer(flowFile);
context.yield();
return;
diff --git a/nifi-nar-bundles/nifi-update-attribute-bundle/nifi-update-attribute-processor/src/main/java/org/apache/nifi/processors/attributes/UpdateAttribute.java b/nifi-nar-bundles/nifi-update-attribute-bundle/nifi-update-attribute-processor/src/main/java/org/apache/nifi/processors/attributes/UpdateAttribute.java
index 571e744103..22de5ae9e3 100644
--- a/nifi-nar-bundles/nifi-update-attribute-bundle/nifi-update-attribute-processor/src/main/java/org/apache/nifi/processors/attributes/UpdateAttribute.java
+++ b/nifi-nar-bundles/nifi-update-attribute-bundle/nifi-update-attribute-processor/src/main/java/org/apache/nifi/processors/attributes/UpdateAttribute.java
@@ -461,7 +461,7 @@ public class UpdateAttribute extends AbstractProcessor implements Searchable {
stateWorkingAttributes = null;
}
} catch (IOException e) {
- logger.error("Failed to get the initial state when processing {}; transferring FlowFile back to its incoming queue", new Object[]{incomingFlowFile}, e);
+ logger.error("Failed to get the initial state when processing {}; transferring FlowFile back to its incoming queue", incomingFlowFile, e);
session.transfer(incomingFlowFile);
context.yield();
return;
@@ -525,7 +525,7 @@ public class UpdateAttribute extends AbstractProcessor implements Searchable {
}
} catch (IOException e) {
logger.error("Failed to set the state after successfully processing {} due a failure when setting the state. This is normally due to multiple threads running at " +
- "once; transferring to '{}'", new Object[]{incomingFlowFile, REL_FAILED_SET_STATE.getName()}, e);
+ "once; transferring to '{}'", incomingFlowFile, REL_FAILED_SET_STATE.getName(), e);
flowFilesToTransfer.remove(incomingFlowFile);
if (flowFilesToTransfer.size() > 0){
diff --git a/nifi-nar-bundles/nifi-websocket-bundle/nifi-websocket-processors/src/main/java/org/apache/nifi/processors/websocket/AbstractWebSocketGatewayProcessor.java b/nifi-nar-bundles/nifi-websocket-bundle/nifi-websocket-processors/src/main/java/org/apache/nifi/processors/websocket/AbstractWebSocketGatewayProcessor.java
index 25ea1da832..803ffc4b59 100644
--- a/nifi-nar-bundles/nifi-websocket-bundle/nifi-websocket-processors/src/main/java/org/apache/nifi/processors/websocket/AbstractWebSocketGatewayProcessor.java
+++ b/nifi-nar-bundles/nifi-websocket-bundle/nifi-websocket-processors/src/main/java/org/apache/nifi/processors/websocket/AbstractWebSocketGatewayProcessor.java
@@ -189,7 +189,7 @@ public abstract class AbstractWebSocketGatewayProcessor extends AbstractSessionF
webSocketService.deregisterProcessor(endpointId, this);
webSocketService = null;
} catch (WebSocketConfigurationException e) {
- logger.warn("Failed to deregister processor {} due to: {}", new Object[]{this, e}, e);
+ logger.warn("Failed to deregister processor {} due to: {}", this, e, e);
}
}
diff --git a/nifi-nar-bundles/nifi-websocket-bundle/nifi-websocket-services-jetty/src/main/java/org/apache/nifi/websocket/jetty/JettyWebSocketClient.java b/nifi-nar-bundles/nifi-websocket-bundle/nifi-websocket-services-jetty/src/main/java/org/apache/nifi/websocket/jetty/JettyWebSocketClient.java
index 4ac1371280..7a0657c8c5 100644
--- a/nifi-nar-bundles/nifi-websocket-bundle/nifi-websocket-services-jetty/src/main/java/org/apache/nifi/websocket/jetty/JettyWebSocketClient.java
+++ b/nifi-nar-bundles/nifi-websocket-bundle/nifi-websocket-services-jetty/src/main/java/org/apache/nifi/websocket/jetty/JettyWebSocketClient.java
@@ -283,7 +283,7 @@ public class JettyWebSocketClient extends AbstractJettyWebSocketService implemen
try {
maintainSessions();
} catch (final Exception e) {
- getLogger().warn("Failed to maintain sessions due to {}", new Object[]{e}, e);
+ getLogger().warn("Failed to maintain sessions due to {}", e, e);
}
}, sessionMaintenanceInterval, sessionMaintenanceInterval, TimeUnit.MILLISECONDS);
}
@@ -319,7 +319,7 @@ public class JettyWebSocketClient extends AbstractJettyWebSocketService implemen
try {
sessionMaintenanceScheduler.shutdown();
} catch (Exception e) {
- getLogger().warn("Failed to shutdown session maintainer due to {}", new Object[]{e}, e);
+ getLogger().warn("Failed to shutdown session maintainer due to {}", e, e);
}
sessionMaintenanceScheduler = null;
}