diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractDatabaseFetchProcessor.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractDatabaseFetchProcessor.java index 939d84555a..2e01ee1e74 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractDatabaseFetchProcessor.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractDatabaseFetchProcessor.java @@ -50,7 +50,6 @@ import java.time.ZoneId; import java.time.format.DateTimeFormatter; import java.time.format.DateTimeParseException; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Date; @@ -67,18 +66,18 @@ import static java.sql.Types.BINARY; import static java.sql.Types.BIT; import static java.sql.Types.BLOB; import static java.sql.Types.BOOLEAN; +import static java.sql.Types.CHAR; import static java.sql.Types.CLOB; +import static java.sql.Types.DATE; import static java.sql.Types.DECIMAL; import static java.sql.Types.DOUBLE; import static java.sql.Types.FLOAT; import static java.sql.Types.INTEGER; -import static java.sql.Types.LONGVARBINARY; -import static java.sql.Types.NUMERIC; -import static java.sql.Types.CHAR; -import static java.sql.Types.DATE; import static java.sql.Types.LONGNVARCHAR; +import static java.sql.Types.LONGVARBINARY; import static java.sql.Types.LONGVARCHAR; import static java.sql.Types.NCHAR; +import static java.sql.Types.NUMERIC; import static java.sql.Types.NVARCHAR; import static java.sql.Types.REAL; import static java.sql.Types.ROWID; @@ -283,10 +282,10 @@ public abstract class AbstractDatabaseFetchProcessor extends AbstractSessionFact columnTypeMap.clear(); } - final List 
maxValueColumnNameList = Arrays.asList(maxValueColumnNames.toLowerCase().split(",")); + final String[] maxValueColumnNameList = maxValueColumnNames.toLowerCase().split(","); final List<String> maxValueQualifiedColumnNameList = new ArrayList<>(); - for (String maxValueColumn:maxValueColumnNameList) { + for (String maxValueColumn : maxValueColumnNameList) { String colKey = getStateKey(tableName, maxValueColumn.trim(), dbAdapter); maxValueQualifiedColumnNameList.add(colKey); } @@ -304,7 +303,7 @@ columnTypeMap.putIfAbsent(colKey, colType); } - for (String maxValueColumn:maxValueColumnNameList) { + for (String maxValueColumn : maxValueColumnNameList) { String colKey = getStateKey(tableName, maxValueColumn.trim().toLowerCase(), dbAdapter); if (!columnTypeMap.containsKey(colKey)) { throw new ProcessException("Column not found in the table/query specified: " + maxValueColumn); diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractExecuteSQL.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractExecuteSQL.java index 104bc06ba9..8ac07ace50 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractExecuteSQL.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractExecuteSQL.java @@ -517,13 +517,13 @@ public abstract class AbstractExecuteSQL extends AbstractProcessor { * Extract list of queries from config property */ protected List<String> getQueries(final String value) { - if (value == null || value.length() == 0 || value.trim().length() == 0) { + if (value == null || value.isEmpty() || value.isBlank()) { return null; } final List<String> queries = new LinkedList<>(); for (String query : 
value.split("(?<!\\\\);")) { - if (query.trim().length() > 0) { + if (!query.isBlank()) { queries.add(query.trim()); } } diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractJsonPathProcessor.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractJsonPathProcessor.java index cc89fe98d3..c2ecfef702 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractJsonPathProcessor.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractJsonPathProcessor.java @@ -38,7 +38,6 @@ import org.apache.nifi.util.StringUtils; import java.io.BufferedInputStream; import java.io.IOException; import java.io.InputStream; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; @@ -53,15 +52,13 @@ import java.util.concurrent.atomic.AtomicReference; */ public abstract class AbstractJsonPathProcessor extends AbstractProcessor { - static final Map<String, String> NULL_REPRESENTATION_MAP = new HashMap<>(); - static final String EMPTY_STRING_OPTION = "empty string"; static final String NULL_STRING_OPTION = "the string 'null'"; - static { - NULL_REPRESENTATION_MAP.put(EMPTY_STRING_OPTION, ""); - NULL_REPRESENTATION_MAP.put(NULL_STRING_OPTION, "null"); - } + static final Map<String, String> NULL_REPRESENTATION_MAP = Map.of( + EMPTY_STRING_OPTION, "", + NULL_STRING_OPTION, "null" + ); public static final PropertyDescriptor NULL_VALUE_DEFAULT_REPRESENTATION = new PropertyDescriptor.Builder() .name("Null Value Representation") diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractQueryDatabaseTable.java 
b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractQueryDatabaseTable.java index ac024e359b..4d6e9d2ced 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractQueryDatabaseTable.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AbstractQueryDatabaseTable.java @@ -69,23 +69,23 @@ public abstract class AbstractQueryDatabaseTable extends AbstractDatabaseFetchPr public static final String RESULT_TABLENAME = "tablename"; public static final String RESULT_ROW_COUNT = "querydbtable.row.count"; - private static AllowableValue TRANSACTION_READ_COMMITTED = new AllowableValue( + private static final AllowableValue TRANSACTION_READ_COMMITTED = new AllowableValue( String.valueOf(Connection.TRANSACTION_READ_COMMITTED), "TRANSACTION_READ_COMMITTED" ); - private static AllowableValue TRANSACTION_READ_UNCOMMITTED = new AllowableValue( + private static final AllowableValue TRANSACTION_READ_UNCOMMITTED = new AllowableValue( String.valueOf(Connection.TRANSACTION_READ_UNCOMMITTED), "TRANSACTION_READ_UNCOMMITTED" ); - private static AllowableValue TRANSACTION_REPEATABLE_READ = new AllowableValue( + private static final AllowableValue TRANSACTION_REPEATABLE_READ = new AllowableValue( String.valueOf(Connection.TRANSACTION_REPEATABLE_READ), "TRANSACTION_REPEATABLE_READ" ); - private static AllowableValue TRANSACTION_NONE = new AllowableValue( + private static final AllowableValue TRANSACTION_NONE = new AllowableValue( String.valueOf(Connection.TRANSACTION_NONE), "TRANSACTION_NONE" ); - private static AllowableValue TRANSACTION_SERIALIZABLE = new AllowableValue( + private static final AllowableValue TRANSACTION_SERIALIZABLE = new AllowableValue( String.valueOf(Connection.TRANSACTION_SERIALIZABLE), "TRANSACTION_SERIALIZABLE" ); diff --git 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AttributesToCSV.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AttributesToCSV.java index fe265f58fb..d16c12bc20 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AttributesToCSV.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AttributesToCSV.java @@ -17,13 +17,13 @@ package org.apache.nifi.processors.standard; -import org.apache.commons.text.StringEscapeUtils; import org.apache.commons.lang3.StringUtils; -import org.apache.nifi.annotation.behavior.WritesAttribute; -import org.apache.nifi.annotation.behavior.WritesAttributes; +import org.apache.commons.text.StringEscapeUtils; +import org.apache.nifi.annotation.behavior.InputRequirement; import org.apache.nifi.annotation.behavior.SideEffectFree; import org.apache.nifi.annotation.behavior.SupportsBatching; -import org.apache.nifi.annotation.behavior.InputRequirement; +import org.apache.nifi.annotation.behavior.WritesAttribute; +import org.apache.nifi.annotation.behavior.WritesAttributes; import org.apache.nifi.annotation.documentation.CapabilityDescription; import org.apache.nifi.annotation.documentation.Tags; import org.apache.nifi.annotation.lifecycle.OnScheduled; @@ -34,23 +34,19 @@ import org.apache.nifi.flowfile.FlowFile; import org.apache.nifi.flowfile.attributes.CoreAttributes; import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.ProcessContext; -import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.ProcessSession; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; 
-import java.util.Map; -import java.util.Set; -import java.util.HashSet; -import java.util.List; +import java.util.Arrays; import java.util.LinkedHashMap; import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; import java.util.regex.Pattern; import java.util.stream.Collectors; -import java.util.Collections; -import java.util.Arrays; -import java.util.ArrayList; @SideEffectFree @SupportsBatching @@ -150,13 +146,25 @@ public class AttributesToCSV extends AbstractProcessor { .defaultValue("false") .build(); + private static final List<PropertyDescriptor> PROPERTIES = List.of( + ATTRIBUTES_LIST, + ATTRIBUTES_REGEX, + DESTINATION, + INCLUDE_CORE_ATTRIBUTES, + NULL_VALUE_FOR_EMPTY_STRING, + INCLUDE_SCHEMA + ); + public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success") .description("Successfully converted attributes to CSV").build(); public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure") .description("Failed to convert attributes to CSV").build(); - private List<PropertyDescriptor> properties; - private Set<Relationship> relationships; + private static final Set<Relationship> RELATIONSHIPS = Set.of( + REL_SUCCESS, + REL_FAILURE + ); + private volatile Boolean includeCoreAttributes; private volatile Set<String> coreAttributes; private volatile boolean destinationContent; @@ -164,31 +172,14 @@ public class AttributesToCSV extends AbstractProcessor { private volatile Pattern pattern; private volatile Boolean includeSchema; - @Override - protected void init(final ProcessorInitializationContext context) { - final List<PropertyDescriptor> properties = new ArrayList<>(); - properties.add(ATTRIBUTES_LIST); - properties.add(ATTRIBUTES_REGEX); - properties.add(DESTINATION); - properties.add(INCLUDE_CORE_ATTRIBUTES); - properties.add(NULL_VALUE_FOR_EMPTY_STRING); - properties.add(INCLUDE_SCHEMA); - this.properties = Collections.unmodifiableList(properties); - - final Set<Relationship> relationships = new HashSet<>(); - relationships.add(REL_SUCCESS); - 
relationships.add(REL_FAILURE); - this.relationships = Collections.unmodifiableSet(relationships); - } - @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @@ -311,7 +302,7 @@ public class AttributesToCSV extends AbstractProcessor { if (destinationContent) { FlowFile conFlowfile = session.write(original, (in, out) -> { if (includeSchema) { - sbNames.append(System.getProperty("line.separator")); + sbNames.append(System.lineSeparator()); out.write(sbNames.toString().getBytes()); } out.write(sbValues.toString().getBytes()); diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AttributesToJSON.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AttributesToJSON.java index d9ef373a65..29921080f7 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AttributesToJSON.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/AttributesToJSON.java @@ -36,16 +36,13 @@ import org.apache.nifi.flowfile.attributes.CoreAttributes; import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; import java.io.BufferedOutputStream; import java.io.OutputStream; -import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; @@ 
-157,7 +154,7 @@ public class AttributesToJSON extends AbstractProcessor { .required(true) .expressionLanguageSupported(ExpressionLanguageScope.NONE) .allowableValues(JsonHandlingStrategy.class) - .defaultValue(AttributesToJSON.JsonHandlingStrategy.ESCAPED.getValue()) + .defaultValue(AttributesToJSON.JsonHandlingStrategy.ESCAPED) .build(); public static final PropertyDescriptor PRETTY_PRINT = new PropertyDescriptor.Builder() @@ -170,14 +167,27 @@ public class AttributesToJSON extends AbstractProcessor { .dependsOn(DESTINATION, DESTINATION_CONTENT) .build(); + private static final List PROPERTIES = List.of( + ATTRIBUTES_LIST, + ATTRIBUTES_REGEX, + DESTINATION, + INCLUDE_CORE_ATTRIBUTES, + NULL_VALUE_FOR_EMPTY_STRING, + JSON_HANDLING_STRATEGY, + PRETTY_PRINT + ); + public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success") .description("Successfully converted attributes to JSON").build(); public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure") .description("Failed to convert attributes to JSON").build(); + private static final Set RELATIONSHIPS = Set.of( + REL_SUCCESS, + REL_FAILURE + ); + private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); - private List properties; - private Set relationships; private volatile Set attributesToRemove; private volatile Set attributes; private volatile Boolean nullValueForEmptyString; @@ -186,32 +196,14 @@ public class AttributesToJSON extends AbstractProcessor { private volatile Pattern pattern; private volatile JsonHandlingStrategy jsonHandlingStrategy; - @Override - protected void init(final ProcessorInitializationContext context) { - final List properties = new ArrayList<>(); - properties.add(ATTRIBUTES_LIST); - properties.add(ATTRIBUTES_REGEX); - properties.add(DESTINATION); - properties.add(INCLUDE_CORE_ATTRIBUTES); - properties.add(NULL_VALUE_FOR_EMPTY_STRING); - properties.add(JSON_HANDLING_STRATEGY); - properties.add(PRETTY_PRINT); - 
this.properties = Collections.unmodifiableList(properties); - - final Set relationships = new HashSet<>(); - relationships.add(REL_SUCCESS); - relationships.add(REL_FAILURE); - this.relationships = Collections.unmodifiableSet(relationships); - } - @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } /** @@ -272,7 +264,7 @@ public class AttributesToJSON extends AbstractProcessor { @OnScheduled public void onScheduled(ProcessContext context) { - attributesToRemove = context.getProperty(INCLUDE_CORE_ATTRIBUTES).asBoolean() ? Collections.EMPTY_SET : Arrays.stream(CoreAttributes.values()) + attributesToRemove = context.getProperty(INCLUDE_CORE_ATTRIBUTES).asBoolean() ? Set.of() : Arrays.stream(CoreAttributes.values()) .map(CoreAttributes::key) .collect(Collectors.toSet()); attributes = buildAtrs(context.getProperty(ATTRIBUTES_LIST).getValue()); @@ -280,7 +272,7 @@ public class AttributesToJSON extends AbstractProcessor { destinationContent = DESTINATION_CONTENT.equals(context.getProperty(DESTINATION).getValue()); final boolean prettyPrint = context.getProperty(PRETTY_PRINT).asBoolean(); objectWriter = destinationContent && prettyPrint ? 
OBJECT_MAPPER.writerWithDefaultPrettyPrinter() : OBJECT_MAPPER.writer(); - jsonHandlingStrategy = JsonHandlingStrategy.valueOf(context.getProperty(JSON_HANDLING_STRATEGY).getValue()); + jsonHandlingStrategy = context.getProperty(JSON_HANDLING_STRATEGY).asAllowableValue(JsonHandlingStrategy.class); if (context.getProperty(ATTRIBUTES_REGEX).isSet()) { pattern = Pattern.compile(context.getProperty(ATTRIBUTES_REGEX).evaluateAttributeExpressions().getValue()); diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CalculateRecordStats.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CalculateRecordStats.java index 0ba35a4040..b3b8c2dc92 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CalculateRecordStats.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CalculateRecordStats.java @@ -45,7 +45,6 @@ import java.io.InputStream; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; @@ -87,6 +86,11 @@ public class CalculateRecordStats extends AbstractProcessor { .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) .build(); + static final List PROPERTIES = List.of( + RECORD_READER, + LIMIT + ); + static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") .description("If a flowfile is successfully processed, it goes here.") @@ -96,22 +100,13 @@ public class CalculateRecordStats extends AbstractProcessor { .description("If a flowfile fails to be processed, it goes here.") .build(); + static final Set RELATIONSHIPS = Set.of( + REL_SUCCESS, + REL_FAILURE + ); + private RecordPathCache cache; - 
static final Set RELATIONSHIPS; - static final List PROPERTIES; - - static { - Set _rels = new HashSet(); - _rels.add(REL_SUCCESS); - _rels.add(REL_FAILURE); - RELATIONSHIPS = Collections.unmodifiableSet(_rels); - List _temp = new ArrayList<>(); - _temp.add(RECORD_READER); - _temp.add(LIMIT); - PROPERTIES = Collections.unmodifiableList(_temp); - } - protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) { return new PropertyDescriptor.Builder() .name(propertyDescriptorName) @@ -160,7 +155,7 @@ public class CalculateRecordStats extends AbstractProcessor { protected Map getRecordPaths(ProcessContext context, FlowFile flowFile) { return context.getProperties().keySet() - .stream().filter(p -> p.isDynamic()) + .stream().filter(PropertyDescriptor::isDynamic) .collect(Collectors.toMap( e -> e.getName(), e -> { @@ -189,7 +184,7 @@ public class CalculateRecordStats extends AbstractProcessor { String approxValue = value.get().getValue().toString(); String baseKey = String.format("recordStats.%s", entry.getKey()); String key = String.format("%s.%s", baseKey, approxValue); - Integer stat = retVal.containsKey(key) ? 
retVal.get(key) : 0; + Integer stat = retVal.getOrDefault(key, 0); Integer baseStat = retVal.getOrDefault(baseKey, 0); stat++; baseStat++; @@ -224,10 +219,10 @@ public class CalculateRecordStats extends AbstractProcessor { protected Map filterBySize(Map values, Integer limit, List baseKeys) { Map toFilter = values.entrySet().stream() .filter(e -> !baseKeys.contains(e.getKey())) - .collect(Collectors.toMap(e -> e.getKey(), e -> e.getValue())); + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); Map retVal = values.entrySet().stream() .filter((e -> baseKeys.contains(e.getKey()))) - .collect(Collectors.toMap(e -> e.getKey(), e -> e.getValue())); + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); List> _flat = new ArrayList<>(toFilter.entrySet()); _flat.sort(Map.Entry.comparingByValue()); diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CompressContent.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CompressContent.java index 15d4b03f7c..27504ad324 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CompressContent.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CompressContent.java @@ -187,6 +187,13 @@ public class CompressContent extends AbstractProcessor { .defaultValue("false") .build(); + private static final List PROPERTIES = List.of( + MODE, + COMPRESSION_FORMAT, + COMPRESSION_LEVEL, + UPDATE_FILENAME + ); + public static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") .description("FlowFiles will be transferred to the success relationship after successfully being compressed or decompressed") @@ -196,13 +203,10 @@ public class CompressContent extends AbstractProcessor { 
.description("FlowFiles will be transferred to the failure relationship if they fail to compress/decompress") .build(); - private final List properties = List.of(MODE, - COMPRESSION_FORMAT, - COMPRESSION_LEVEL, - UPDATE_FILENAME); - - private final Set relationships = Set.of(REL_SUCCESS, - REL_FAILURE); + private static final Set RELATIONSHIPS = Set.of( + REL_SUCCESS, + REL_FAILURE + ); private final Map compressionFormatMimeTypeMap = Map.ofEntries( Map.entry("application/gzip", COMPRESSION_FORMAT_GZIP), @@ -222,12 +226,12 @@ public class CompressContent extends AbstractProcessor { @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @Override @@ -439,7 +443,7 @@ public class CompressContent extends AbstractProcessor { if (closeable != null) { try { closeable.close(); - } catch (final Exception e) { + } catch (final Exception ignored) { } } } diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ControlRate.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ControlRate.java index a7bdae2e99..38382efb06 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ControlRate.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ControlRate.java @@ -208,6 +208,17 @@ public class ControlRate extends AbstractProcessor { .expressionLanguageSupported(ExpressionLanguageScope.NONE) .build(); + private static final List PROPERTIES = List.of( + RATE_CONTROL_CRITERIA, + TIME_PERIOD, + MAX_RATE, + MAX_DATA_RATE, + MAX_COUNT_RATE, + RATE_EXCEEDED_STRATEGY, + RATE_CONTROL_ATTRIBUTE_NAME, + GROUPING_ATTRIBUTE_NAME + ); + static final 
Relationship REL_SUCCESS = new Relationship.Builder() .name("success") .description("FlowFiles are transferred to this relationship under normal conditions") @@ -222,23 +233,20 @@ public class ControlRate extends AbstractProcessor { "Strategy is configured to use this Relationship.") .build(); + private static final Set DEFAULT_RELATIONSHIPS = Set.of( + REL_SUCCESS, + REL_FAILURE + ); + private static final Set RATE_EXCEEDED_RELATIONSHIPS = Set.of( + REL_SUCCESS, + REL_FAILURE, + REL_RATE_EXCEEDED + ); + private static final Pattern POSITIVE_LONG_PATTERN = Pattern.compile("0*[1-9][0-9]*"); private static final String DEFAULT_GROUP_ATTRIBUTE = ControlRate.class.getName() + "###____DEFAULT_GROUP_ATTRIBUTE___###"; - private static final List properties = List.of( - RATE_CONTROL_CRITERIA, - TIME_PERIOD, - MAX_RATE, - MAX_DATA_RATE, - MAX_COUNT_RATE, - RATE_EXCEEDED_STRATEGY, - RATE_CONTROL_ATTRIBUTE_NAME, - GROUPING_ATTRIBUTE_NAME - ); - - private static final Set defaultRelationships = Set.of(REL_SUCCESS, REL_FAILURE); - private static final Set rateExceededRelationships = Set.of(REL_SUCCESS, REL_FAILURE, REL_RATE_EXCEEDED); - private volatile Set relationships = defaultRelationships; + private volatile Set relationships = DEFAULT_RELATIONSHIPS; private final ConcurrentMap dataThrottleMap = new ConcurrentHashMap<>(); private final ConcurrentMap countThrottleMap = new ConcurrentHashMap<>(); @@ -253,7 +261,7 @@ public class ControlRate extends AbstractProcessor { @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @Override @@ -300,9 +308,9 @@ public class ControlRate extends AbstractProcessor { if (descriptor.equals(RATE_EXCEEDED_STRATEGY)) { if (ROUTE_TO_RATE_EXCEEDED.getValue().equalsIgnoreCase(newValue)) { - this.relationships = rateExceededRelationships; + this.relationships = RATE_EXCEEDED_RELATIONSHIPS; } else { - this.relationships = defaultRelationships; + this.relationships = DEFAULT_RELATIONSHIPS; } } diff 
--git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java index 8c6785cce6..a45fbf864b 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertCharacterSet.java @@ -16,6 +16,25 @@ */ package org.apache.nifi.processors.standard; +import org.apache.nifi.annotation.behavior.InputRequirement; +import org.apache.nifi.annotation.behavior.InputRequirement.Requirement; +import org.apache.nifi.annotation.behavior.SideEffectFree; +import org.apache.nifi.annotation.behavior.SupportsBatching; +import org.apache.nifi.annotation.documentation.CapabilityDescription; +import org.apache.nifi.annotation.documentation.Tags; +import org.apache.nifi.components.PropertyDescriptor; +import org.apache.nifi.expression.ExpressionLanguageScope; +import org.apache.nifi.flowfile.FlowFile; +import org.apache.nifi.logging.ComponentLog; +import org.apache.nifi.processor.AbstractProcessor; +import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.ProcessSession; +import org.apache.nifi.processor.Relationship; +import org.apache.nifi.processor.exception.ProcessException; +import org.apache.nifi.processor.io.StreamCallback; +import org.apache.nifi.processor.util.StandardValidators; +import org.apache.nifi.util.StopWatch; + import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.IOException; @@ -23,35 +42,11 @@ import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.OutputStreamWriter; - -import org.apache.nifi.processor.ProcessContext; 
-import org.apache.nifi.processor.AbstractProcessor; -import org.apache.nifi.processor.ProcessorInitializationContext; -import org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.Relationship; -import org.apache.nifi.components.PropertyDescriptor; -import org.apache.nifi.expression.ExpressionLanguageScope; -import org.apache.nifi.flowfile.FlowFile; -import org.apache.nifi.logging.ComponentLog; -import org.apache.nifi.annotation.documentation.CapabilityDescription; -import org.apache.nifi.annotation.behavior.InputRequirement; -import org.apache.nifi.annotation.behavior.SideEffectFree; -import org.apache.nifi.annotation.behavior.SupportsBatching; -import org.apache.nifi.annotation.behavior.InputRequirement.Requirement; -import org.apache.nifi.annotation.documentation.Tags; -import org.apache.nifi.processor.exception.ProcessException; -import org.apache.nifi.processor.io.StreamCallback; -import org.apache.nifi.processor.util.StandardValidators; -import org.apache.nifi.util.StopWatch; - import java.nio.CharBuffer; import java.nio.charset.Charset; import java.nio.charset.CharsetDecoder; import java.nio.charset.CharsetEncoder; import java.nio.charset.CodingErrorAction; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.TimeUnit; @@ -99,33 +94,25 @@ public class ConvertCharacterSet extends AbstractProcessor { .required(true) .build(); + private final List PROPERTIES = List.of( + INPUT_CHARSET, + OUTPUT_CHARSET + ); + public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success").description("").build(); + private final Set RELATIONSHIPS = Set.of(REL_SUCCESS); + public static final int MAX_BUFFER_SIZE = 512 * 1024; - private Set relationships; - private List properties; - - @Override - protected void init(final ProcessorInitializationContext context) { - final Set relationships = new HashSet<>(); - 
relationships.add(REL_SUCCESS); - this.relationships = Collections.unmodifiableSet(relationships); - - final List properties = new ArrayList<>(); - properties.add(INPUT_CHARSET); - properties.add(OUTPUT_CHARSET); - this.properties = Collections.unmodifiableList(properties); - } - @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertRecord.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertRecord.java index d04a36fe4c..7bc05979ca 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertRecord.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ConvertRecord.java @@ -32,6 +32,7 @@ import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.serialization.record.Record; import java.util.ArrayList; +import java.util.Collections; import java.util.List; @SupportsBatching @@ -60,12 +61,11 @@ public class ConvertRecord extends AbstractRecordProcessor { protected List getSupportedPropertyDescriptors() { final List properties = new ArrayList<>(super.getSupportedPropertyDescriptors()); properties.add(INCLUDE_ZERO_RECORD_FLOWFILES); - return properties; + return Collections.unmodifiableList(properties); } @Override protected Record process(final Record record, final FlowFile flowFile, final ProcessContext context, final long count) { return record; } - } diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CountText.java 
b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CountText.java index 2ff3458641..276078add0 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CountText.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CountText.java @@ -16,26 +16,6 @@ */ package org.apache.nifi.processors.standard; -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStreamReader; -import java.nio.charset.Charset; -import java.nio.charset.StandardCharsets; -import java.text.DecimalFormat; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.regex.Pattern; -import java.util.stream.Collectors; -import java.util.stream.Stream; - import org.apache.nifi.annotation.behavior.InputRequirement; import org.apache.nifi.annotation.behavior.InputRequirement.Requirement; import org.apache.nifi.annotation.behavior.SideEffectFree; @@ -56,6 +36,23 @@ import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; import org.apache.nifi.util.StringUtils; +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; +import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; +import java.text.DecimalFormat; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.regex.Pattern; +import 
java.util.stream.Collectors; +import java.util.stream.Stream; + @SideEffectFree @SupportsBatching @Tags({"count", "text", "line", "word", "character"}) @@ -70,13 +67,14 @@ import org.apache.nifi.util.StringUtils; }) @SeeAlso(SplitText.class) public class CountText extends AbstractProcessor { - private static final List STANDARD_CHARSETS = Arrays.asList( + private static final List STANDARD_CHARSETS = List.of( StandardCharsets.UTF_8, StandardCharsets.US_ASCII, StandardCharsets.ISO_8859_1, StandardCharsets.UTF_16, StandardCharsets.UTF_16LE, - StandardCharsets.UTF_16BE); + StandardCharsets.UTF_16BE + ); private static final Pattern SYMBOL_PATTERN = Pattern.compile("[\\s-\\._]"); private static final Pattern WHITESPACE_ONLY_PATTERN = Pattern.compile("\\s"); @@ -152,9 +150,16 @@ public class CountText extends AbstractProcessor { .defaultValue("false") .addValidator(StandardValidators.BOOLEAN_VALIDATOR) .build(); - private static Set getStandardCharsetNames() { - return STANDARD_CHARSETS.stream().map(c -> c.displayName()).collect(Collectors.toSet()); - } + + private static final List PROPERTIES = List.of( + TEXT_LINE_COUNT_PD, + TEXT_LINE_NONEMPTY_COUNT_PD, + TEXT_WORD_COUNT_PD, + TEXT_CHARACTER_COUNT_PD, + SPLIT_WORDS_ON_SYMBOLS_PD, + CHARACTER_ENCODING_PD, + ADJUST_IMMEDIATELY + ); public static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") @@ -165,20 +170,13 @@ public class CountText extends AbstractProcessor { .description("If the flowfile text cannot be counted for some reason, the original file will be routed to this destination and nothing will be routed elsewhere") .build(); - private static final List properties; - private static final Set relationships; + private static final Set RELATIONSHIPS = Set.of( + REL_SUCCESS, + REL_FAILURE + ); - static { - properties = Collections.unmodifiableList(Arrays.asList(TEXT_LINE_COUNT_PD, - TEXT_LINE_NONEMPTY_COUNT_PD, - TEXT_WORD_COUNT_PD, - TEXT_CHARACTER_COUNT_PD, - SPLIT_WORDS_ON_SYMBOLS_PD, 
- CHARACTER_ENCODING_PD, - ADJUST_IMMEDIATELY)); - - relationships = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(REL_SUCCESS, - REL_FAILURE))); + private static Set getStandardCharsetNames() { + return STANDARD_CHARSETS.stream().map(Charset::displayName).collect(Collectors.toSet()); } private volatile boolean countLines; @@ -191,7 +189,7 @@ public class CountText extends AbstractProcessor { @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @OnScheduled @@ -240,7 +238,7 @@ public class CountText extends AbstractProcessor { } if (countLinesNonEmpty) { - if (line.trim().length() > 0) { + if (!line.isBlank()) { lineNonEmptyCount.incrementAndGet(); } } @@ -316,7 +314,7 @@ public class CountText extends AbstractProcessor { } int countWordsInLine(String line, boolean splitWordsOnSymbols) throws IOException { - if (line == null || line.trim().length() == 0) { + if (line == null || line.isBlank()) { return 0; } else { Pattern regex = splitWordsOnSymbols ? 
SYMBOL_PATTERN : WHITESPACE_ONLY_PATTERN; @@ -333,6 +331,6 @@ public class CountText extends AbstractProcessor { @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } } diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CryptographicHashContent.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CryptographicHashContent.java index 87e11ba0ec..3f43f78c9a 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CryptographicHashContent.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/CryptographicHashContent.java @@ -17,12 +17,6 @@ package org.apache.nifi.processors.standard; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import java.util.concurrent.atomic.AtomicReference; import org.apache.nifi.annotation.behavior.InputRequirement; import org.apache.nifi.annotation.behavior.SideEffectFree; import org.apache.nifi.annotation.behavior.SupportsBatching; @@ -35,13 +29,16 @@ import org.apache.nifi.logging.ComponentLog; import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; import org.apache.nifi.processors.standard.hash.HashAlgorithm; import org.apache.nifi.processors.standard.hash.HashService; +import java.util.List; +import java.util.Set; +import java.util.concurrent.atomic.AtomicReference; + @SideEffectFree 
@SupportsBatching @Tags({"content", "hash", "sha", "blake2", "md5", "cryptography"}) @@ -73,6 +70,11 @@ public class CryptographicHashContent extends AbstractProcessor { .addValidator(StandardValidators.NON_EMPTY_VALIDATOR) .build(); + private static final List PROPERTIES = List.of( + FAIL_WHEN_EMPTY, + HASH_ALGORITHM + ); + public static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") .description("Used for flowfiles that have a hash value added") @@ -83,31 +85,19 @@ public class CryptographicHashContent extends AbstractProcessor { .description("Used for flowfiles that have no content if the 'fail on empty' setting is enabled") .build(); - private static Set relationships; - - private static List properties; - - @Override - protected void init(final ProcessorInitializationContext context) { - final Set _relationships = new HashSet<>(); - _relationships.add(REL_FAILURE); - _relationships.add(REL_SUCCESS); - relationships = Collections.unmodifiableSet(_relationships); - - final List _properties = new ArrayList<>(); - _properties.add(FAIL_WHEN_EMPTY); - _properties.add(HASH_ALGORITHM); - properties = Collections.unmodifiableList(_properties); - } + private static final Set RELATIONSHIPS = Set.of( + REL_FAILURE, + REL_SUCCESS + ); @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DebugFlow.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DebugFlow.java index ad7f5ad1d0..149b0e1070 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DebugFlow.java +++ 
b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DebugFlow.java @@ -16,19 +16,6 @@ */ package org.apache.nifi.processors.standard; -import java.io.IOException; -import java.io.OutputStream; -import java.lang.reflect.InvocationTargetException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Random; -import java.util.Set; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicReference; - import org.apache.nifi.annotation.documentation.CapabilityDescription; import org.apache.nifi.annotation.documentation.Tags; import org.apache.nifi.annotation.lifecycle.OnScheduled; @@ -51,6 +38,16 @@ import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.io.OutputStreamCallback; import org.apache.nifi.processor.util.StandardValidators; +import java.io.IOException; +import java.io.OutputStream; +import java.lang.reflect.InvocationTargetException; +import java.util.Collection; +import java.util.List; +import java.util.Random; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; + @Tags({"test", "debug", "processor", "utility", "flow", "FlowFile"}) @CapabilityDescription("The DebugFlow processor aids testing and debugging the FlowFile framework by allowing various " + "responses to be explicitly triggered in response to the receipt of a FlowFile or a timer event without a " @@ -69,7 +66,7 @@ public class DebugFlow extends AbstractProcessor { .description("FlowFiles that failed to process.") .build(); - private final AtomicReference> propertyDescriptors = new AtomicReference<>(); + private final AtomicReference> properties = new AtomicReference<>(); static final PropertyDescriptor FF_SUCCESS_ITERATIONS = new PropertyDescriptor.Builder() .name("FlowFile Success Iterations") @@ 
-264,10 +261,7 @@ public class DebugFlow extends AbstractProcessor { public Set getRelationships() { synchronized (relationships) { if (relationships.get() == null) { - HashSet relSet = new HashSet<>(); - relSet.add(REL_SUCCESS); - relSet.add(REL_FAILURE); - relationships.compareAndSet(null, Collections.unmodifiableSet(relSet)); + relationships.compareAndSet(null, Set.of(REL_SUCCESS, REL_FAILURE)); } return relationships.get(); } @@ -275,35 +269,36 @@ public class DebugFlow extends AbstractProcessor { @Override protected List getSupportedPropertyDescriptors() { - synchronized (propertyDescriptors) { - if (propertyDescriptors.get() == null) { - ArrayList propList = new ArrayList<>(); - propList.add(FF_SUCCESS_ITERATIONS); - propList.add(FF_FAILURE_ITERATIONS); - propList.add(FF_ROLLBACK_ITERATIONS); - propList.add(FF_ROLLBACK_YIELD_ITERATIONS); - propList.add(FF_ROLLBACK_PENALTY_ITERATIONS); - propList.add(FF_EXCEPTION_ITERATIONS); - propList.add(FF_EXCEPTION_CLASS); - propList.add(NO_FF_SKIP_ITERATIONS); - propList.add(NO_FF_EXCEPTION_ITERATIONS); - propList.add(NO_FF_YIELD_ITERATIONS); - propList.add(NO_FF_EXCEPTION_CLASS); - propList.add(WRITE_ITERATIONS); - propList.add(CONTENT_SIZE); - propList.add(ON_SCHEDULED_SLEEP_TIME); - propList.add(ON_SCHEDULED_FAIL); - propList.add(ON_UNSCHEDULED_SLEEP_TIME); - propList.add(ON_UNSCHEDULED_FAIL); - propList.add(ON_STOPPED_SLEEP_TIME); - propList.add(ON_STOPPED_FAIL); - propList.add(ON_TRIGGER_SLEEP_TIME); - propList.add(CUSTOM_VALIDATE_SLEEP_TIME); - propList.add(IGNORE_INTERRUPTS); + synchronized (properties) { + if (properties.get() == null) { + List properties = List.of( + FF_SUCCESS_ITERATIONS, + FF_FAILURE_ITERATIONS, + FF_ROLLBACK_ITERATIONS, + FF_ROLLBACK_YIELD_ITERATIONS, + FF_ROLLBACK_PENALTY_ITERATIONS, + FF_EXCEPTION_ITERATIONS, + FF_EXCEPTION_CLASS, + NO_FF_SKIP_ITERATIONS, + NO_FF_EXCEPTION_ITERATIONS, + NO_FF_YIELD_ITERATIONS, + NO_FF_EXCEPTION_CLASS, + WRITE_ITERATIONS, + CONTENT_SIZE, + 
ON_SCHEDULED_SLEEP_TIME, + ON_SCHEDULED_FAIL, + ON_UNSCHEDULED_SLEEP_TIME, + ON_UNSCHEDULED_FAIL, + ON_STOPPED_SLEEP_TIME, + ON_STOPPED_FAIL, + ON_TRIGGER_SLEEP_TIME, + CUSTOM_VALIDATE_SLEEP_TIME, + IGNORE_INTERRUPTS + ); - propertyDescriptors.compareAndSet(null, Collections.unmodifiableList(propList)); + this.properties.compareAndSet(null, properties); } - return propertyDescriptors.get(); + return properties.get(); } } @@ -352,7 +347,7 @@ public class DebugFlow extends AbstractProcessor { } catch (InterruptedException e) { Thread.currentThread().interrupt(); - return Collections.singleton(new ValidationResult.Builder() + return Set.of(new ValidationResult.Builder() .valid(false) .subject("Validation") .explanation("Processor Interrupted while performing validation").build()); diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DeduplicateRecord.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DeduplicateRecord.java index 5bfc5066ae..21eb850913 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DeduplicateRecord.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DeduplicateRecord.java @@ -47,7 +47,6 @@ import org.apache.nifi.logging.ComponentLog; import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; @@ -71,7 +70,6 @@ import java.nio.charset.StandardCharsets; import java.security.MessageDigest; import java.util.ArrayList; import 
java.util.Collection; -import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -266,8 +264,19 @@ public class DeduplicateRecord extends AbstractProcessor { .required(false) .build(); - - // RELATIONSHIPS + private static final List PROPERTIES = List.of( + DEDUPLICATION_STRATEGY, + DISTRIBUTED_MAP_CACHE, + CACHE_IDENTIFIER, + PUT_CACHE_IDENTIFIER, + RECORD_READER, + RECORD_WRITER, + INCLUDE_ZERO_RECORD_FLOWFILES, + RECORD_HASHING_ALGORITHM, + FILTER_TYPE, + FILTER_CAPACITY_HINT, + BLOOM_FILTER_FPP + ); static final Relationship REL_DUPLICATE = new Relationship.Builder() .name("duplicate") @@ -289,42 +298,21 @@ public class DeduplicateRecord extends AbstractProcessor { .description("If unable to communicate with the cache, the FlowFile will be penalized and routed to this relationship") .build(); - private List descriptors; - - private Set relationships; - - @Override - protected void init(final ProcessorInitializationContext context) { - final List descriptors = new ArrayList<>(); - descriptors.add(DEDUPLICATION_STRATEGY); - descriptors.add(DISTRIBUTED_MAP_CACHE); - descriptors.add(CACHE_IDENTIFIER); - descriptors.add(PUT_CACHE_IDENTIFIER); - descriptors.add(RECORD_READER); - descriptors.add(RECORD_WRITER); - descriptors.add(INCLUDE_ZERO_RECORD_FLOWFILES); - descriptors.add(RECORD_HASHING_ALGORITHM); - descriptors.add(FILTER_TYPE); - descriptors.add(FILTER_CAPACITY_HINT); - descriptors.add(BLOOM_FILTER_FPP); - this.descriptors = Collections.unmodifiableList(descriptors); - - final Set relationships = new HashSet<>(); - relationships.add(REL_DUPLICATE); - relationships.add(REL_NON_DUPLICATE); - relationships.add(REL_ORIGINAL); - relationships.add(REL_FAILURE); - this.relationships = Collections.unmodifiableSet(relationships); - } + private static final Set RELATIONSHIPS = Set.of( + REL_DUPLICATE, + REL_NON_DUPLICATE, + REL_ORIGINAL, + REL_FAILURE + ); @Override public Set getRelationships() { - return 
this.relationships; + return RELATIONSHIPS; } @Override public final List getSupportedPropertyDescriptors() { - return descriptors; + return PROPERTIES; } @Override @@ -552,7 +540,7 @@ public class DeduplicateRecord extends AbstractProcessor { final String value = context.getProperty(propertyDescriptor).evaluateAttributeExpressions(flowFile).getValue(); final RecordPath recordPath = recordPathCache.getCompiled(value); final RecordPathResult result = recordPath.evaluate(record); - final List selectedFields = result.getSelectedFields().collect(Collectors.toList()); + final List selectedFields = result.getSelectedFields().toList(); // Add the name of the dynamic property fieldValues.add(propertyDescriptor.getName()); @@ -561,7 +549,7 @@ public class DeduplicateRecord extends AbstractProcessor { fieldValues.addAll(selectedFields.stream() .filter(f -> f.getValue() != null) .map(f -> f.getValue().toString()) - .collect(Collectors.toList()) + .toList() ); } diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DeleteFile.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DeleteFile.java index ae885ea2e4..befcf8fc8e 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DeleteFile.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DeleteFile.java @@ -85,7 +85,11 @@ public class DeleteFile extends AbstractProcessor { .description("All FlowFiles, for which an existing file could not be deleted, are routed to this relationship") .build(); - private final static Set relationships = Set.of(REL_SUCCESS, REL_NOT_FOUND, REL_FAILURE); + private final static Set RELATIONSHIPS = Set.of( + REL_SUCCESS, + REL_NOT_FOUND, + REL_FAILURE + ); public static final PropertyDescriptor 
DIRECTORY_PATH = new PropertyDescriptor.Builder() .name("Directory Path") @@ -105,16 +109,19 @@ public class DeleteFile extends AbstractProcessor { .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) .build(); - private final static List properties = List.of(DIRECTORY_PATH, FILENAME); + private final static List PROPERTIES = List.of( + DIRECTORY_PATH, + FILENAME + ); @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DetectDuplicate.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DetectDuplicate.java index 201d3bccf3..50d20012e9 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DetectDuplicate.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DetectDuplicate.java @@ -16,33 +16,23 @@ */ package org.apache.nifi.processors.standard; -import java.io.IOException; -import java.io.OutputStream; -import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import java.util.concurrent.TimeUnit; - import org.apache.commons.lang3.StringUtils; import org.apache.nifi.annotation.behavior.DefaultRunDuration; import org.apache.nifi.annotation.behavior.InputRequirement; +import org.apache.nifi.annotation.behavior.InputRequirement.Requirement; import org.apache.nifi.annotation.behavior.SupportsBatching; +import org.apache.nifi.annotation.behavior.WritesAttribute; import 
org.apache.nifi.annotation.documentation.CapabilityDescription; import org.apache.nifi.annotation.documentation.SeeAlso; import org.apache.nifi.annotation.documentation.Tags; -import org.apache.nifi.annotation.behavior.WritesAttribute; -import org.apache.nifi.annotation.behavior.InputRequirement.Requirement; import org.apache.nifi.components.PropertyDescriptor; import org.apache.nifi.distributed.cache.client.Deserializer; import org.apache.nifi.distributed.cache.client.DistributedMapCacheClient; import org.apache.nifi.distributed.cache.client.Serializer; import org.apache.nifi.distributed.cache.client.exception.DeserializationException; import org.apache.nifi.distributed.cache.client.exception.SerializationException; -import org.apache.nifi.expression.ExpressionLanguageScope; import org.apache.nifi.expression.AttributeExpression.ResultType; +import org.apache.nifi.expression.ExpressionLanguageScope; import org.apache.nifi.flowfile.FlowFile; import org.apache.nifi.logging.ComponentLog; import org.apache.nifi.processor.AbstractProcessor; @@ -52,6 +42,13 @@ import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; +import java.io.IOException; +import java.io.OutputStream; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.Set; +import java.util.concurrent.TimeUnit; + @SupportsBatching(defaultDuration = DefaultRunDuration.TWENTY_FIVE_MILLIS) @Tags({"hash", "dupe", "duplicate", "dedupe"}) @InputRequirement(Requirement.INPUT_REQUIRED) @@ -111,6 +108,14 @@ public class DetectDuplicate extends AbstractProcessor { .defaultValue("true") .build(); + private static final List PROPERTIES = List.of( + CACHE_ENTRY_IDENTIFIER, + FLOWFILE_DESCRIPTION, + AGE_OFF_DURATION, + DISTRIBUTED_CACHE_SERVICE, + CACHE_IDENTIFIER + ); + public static final Relationship REL_DUPLICATE = new Relationship.Builder() .name("duplicate") .description("If a 
FlowFile has been detected to be a duplicate, it will be routed to this relationship") @@ -123,34 +128,25 @@ public class DetectDuplicate extends AbstractProcessor { .name("failure") .description("If unable to communicate with the cache, the FlowFile will be penalized and routed to this relationship") .build(); - private final Set relationships; + + private static final Set RELATIONSHIPS = Set.of( + REL_DUPLICATE, + REL_NON_DUPLICATE, + REL_FAILURE + ); private final Serializer keySerializer = new StringSerializer(); private final Serializer valueSerializer = new CacheValueSerializer(); private final Deserializer valueDeserializer = new CacheValueDeserializer(); - public DetectDuplicate() { - final Set rels = new HashSet<>(); - rels.add(REL_DUPLICATE); - rels.add(REL_NON_DUPLICATE); - rels.add(REL_FAILURE); - relationships = Collections.unmodifiableSet(rels); - } - @Override protected List getSupportedPropertyDescriptors() { - final List descriptors = new ArrayList<>(); - descriptors.add(CACHE_ENTRY_IDENTIFIER); - descriptors.add(FLOWFILE_DESCRIPTION); - descriptors.add(AGE_OFF_DURATION); - descriptors.add(DISTRIBUTED_CACHE_SERVICE); - descriptors.add(CACHE_IDENTIFIER); - return descriptors; + return PROPERTIES; } @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override @@ -272,8 +268,7 @@ public class DetectDuplicate extends AbstractProcessor { + ((input[6] & 255) << 8) + ((input[7] & 255)); String description = new String(input, 8, input.length - 8, StandardCharsets.UTF_8); - CacheValue value = new CacheValue(description, time); - return value; + return new CacheValue(description, time); } } diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DistributeLoad.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DistributeLoad.java index a30d10a1f4..8077bc2b91 100644 --- 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DistributeLoad.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DistributeLoad.java @@ -16,17 +16,6 @@ */ package org.apache.nifi.processors.standard; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicLong; -import java.util.concurrent.atomic.AtomicReference; - import org.apache.nifi.annotation.behavior.DefaultRunDuration; import org.apache.nifi.annotation.behavior.DynamicProperty; import org.apache.nifi.annotation.behavior.DynamicRelationship; @@ -53,6 +42,16 @@ import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.util.StandardValidators; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicReference; + @SideEffectFree @SupportsBatching(defaultDuration = DefaultRunDuration.TWENTY_FIVE_MILLIS) @InputRequirement(Requirement.INPUT_REQUIRED) @@ -83,8 +82,6 @@ public class DistributeLoad extends AbstractProcessor { public static final AllowableValue STRATEGY_OVERFLOW = new AllowableValue(OVERFLOW, OVERFLOW, "Relationship selection is the first available relationship without further distribution among all relationships; at least one relationship must be available."); - - public static final PropertyDescriptor NUM_RELATIONSHIPS = new PropertyDescriptor.Builder() .name("Number of Relationships") 
.description("Determines the number of Relationships to which the load should be distributed") @@ -99,9 +96,14 @@ public class DistributeLoad extends AbstractProcessor { .allowableValues(STRATEGY_ROUND_ROBIN, STRATEGY_NEXT_AVAILABLE, STRATEGY_OVERFLOW) .defaultValue(ROUND_ROBIN) .build(); + + private List properties = List.of( + NUM_RELATIONSHIPS, + DISTRIBUTION_STRATEGY + ); + public static final String RELATIONSHIP_ATTRIBUTE = "distribute.load.relationship"; - private List properties; private final AtomicReference> relationshipsRef = new AtomicReference<>(); private final AtomicReference strategyRef = new AtomicReference<>(new RoundRobinStrategy()); private final AtomicReference> weightedRelationshipListRef = new AtomicReference<>(); @@ -109,14 +111,7 @@ public class DistributeLoad extends AbstractProcessor { @Override protected void init(final ProcessorInitializationContext context) { - final Set relationships = new HashSet<>(); - relationships.add(createRelationship(1)); - relationshipsRef.set(Collections.unmodifiableSet(relationships)); - - final List properties = new ArrayList<>(); - properties.add(NUM_RELATIONSHIPS); - properties.add(DISTRIBUTION_STRATEGY); - this.properties = Collections.unmodifiableList(properties); + relationshipsRef.set(Set.of(createRelationship(1))); } private static Relationship createRelationship(final int num) { @@ -136,7 +131,7 @@ public class DistributeLoad extends AbstractProcessor { for (int i = 1; i <= Integer.parseInt(newValue); i++) { relationships.add(createRelationship(i)); } - this.relationshipsRef.set(Collections.unmodifiableSet(relationships)); + this.relationshipsRef.set(Set.copyOf(relationships)); } else if (descriptor.equals(DISTRIBUTION_STRATEGY)) { switch (newValue.toLowerCase()) { case ROUND_ROBIN: @@ -158,10 +153,7 @@ public class DistributeLoad extends AbstractProcessor { @Override protected List getSupportedPropertyDescriptors() { if (doSetProps.getAndSet(false)) { - final List props = new ArrayList<>(); - 
props.add(NUM_RELATIONSHIPS); - props.add(DISTRIBUTION_STRATEGY); - this.properties = Collections.unmodifiableList(props); + this.properties = List.of(NUM_RELATIONSHIPS, DISTRIBUTION_STRATEGY); } return properties; } @@ -215,7 +207,7 @@ public class DistributeLoad extends AbstractProcessor { } } - this.weightedRelationshipListRef.set(Collections.unmodifiableList(relationshipList)); + this.weightedRelationshipListRef.set(List.copyOf(relationshipList)); } @Override @@ -291,8 +283,7 @@ public class DistributeLoad extends AbstractProcessor { final List relationshipList = DistributeLoad.this.weightedRelationshipListRef.get(); final long counterValue = counter.getAndIncrement(); final int idx = (int) (counterValue % relationshipList.size()); - final Relationship relationship = relationshipList.get(idx); - return relationship; + return relationshipList.get(idx); } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DuplicateFlowFile.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DuplicateFlowFile.java index 26681e7ee3..50061ab939 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DuplicateFlowFile.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/DuplicateFlowFile.java @@ -16,13 +16,9 @@ */ package org.apache.nifi.processors.standard; -import java.util.Collections; -import java.util.List; -import java.util.Set; - import org.apache.nifi.annotation.behavior.InputRequirement; -import org.apache.nifi.annotation.behavior.SupportsBatching; import org.apache.nifi.annotation.behavior.InputRequirement.Requirement; +import org.apache.nifi.annotation.behavior.SupportsBatching; import org.apache.nifi.annotation.behavior.WritesAttribute; import 
org.apache.nifi.annotation.behavior.WritesAttributes; import org.apache.nifi.annotation.documentation.CapabilityDescription; @@ -37,6 +33,9 @@ import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; +import java.util.List; +import java.util.Set; + @SupportsBatching @Tags({"test", "load", "duplicate"}) @InputRequirement(Requirement.INPUT_REQUIRED) @@ -60,19 +59,23 @@ public class DuplicateFlowFile extends AbstractProcessor { .addValidator(StandardValidators.NON_NEGATIVE_INTEGER_VALIDATOR) .build(); + private static final List PROPERTIES = List.of(NUM_COPIES); + static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") .description("The original FlowFile and all copies will be sent to this relationship") .build(); + private static final Set RELATIONSHIPS = Set.of(REL_SUCCESS); + @Override public Set getRelationships() { - return Collections.singleton(REL_SUCCESS); + return RELATIONSHIPS; } @Override protected List getSupportedPropertyDescriptors() { - return Collections.singletonList(NUM_COPIES); + return PROPERTIES; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncodeContent.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncodeContent.java index 29f23ee3ed..d8ceef2577 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncodeContent.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EncodeContent.java @@ -16,14 +16,6 @@ */ package org.apache.nifi.processors.standard; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.util.Arrays; -import java.util.List; 
-import java.util.Set; -import java.util.concurrent.TimeUnit; - import org.apache.commons.codec.DecoderException; import org.apache.commons.codec.binary.Base32InputStream; import org.apache.commons.codec.binary.Base32OutputStream; @@ -53,6 +45,14 @@ import org.apache.nifi.processors.standard.util.ValidatingBase64InputStream; import org.apache.nifi.stream.io.StreamUtils; import org.apache.nifi.util.StopWatch; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.Arrays; +import java.util.List; +import java.util.Set; +import java.util.concurrent.TimeUnit; + @SideEffectFree @SupportsBatching @InputRequirement(Requirement.INPUT_REQUIRED) @@ -101,6 +101,13 @@ public class EncodeContent extends AbstractProcessor { .dependsOn(LINE_OUTPUT_MODE, LineOutputMode.MULTIPLE_LINES) .build(); + private static final List PROPERTIES = List.of( + MODE, + ENCODING, + LINE_OUTPUT_MODE, + ENCODED_LINE_LENGTH + ); + public static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") .description("Any FlowFile that is successfully encoded or decoded will be routed to success") @@ -111,27 +118,23 @@ public class EncodeContent extends AbstractProcessor { .description("Any FlowFile that cannot be encoded or decoded will be routed to failure") .build(); + private static final Set RELATIONSHIPS = Set.of( + REL_SUCCESS, + REL_FAILURE + ); + private static final int BUFFER_SIZE = 8192; private static final String LINE_FEED_SEPARATOR = "\n"; - private static final List properties = List.of( - MODE, - ENCODING, - LINE_OUTPUT_MODE, - ENCODED_LINE_LENGTH - ); - - private static final Set relationships = Set.of(REL_SUCCESS, REL_FAILURE); - @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @Override @@ -141,9 +144,9 @@ public class EncodeContent extends AbstractProcessor { return; } 
- final boolean encode = context.getProperty(MODE).getValue().equals(EncodingMode.ENCODE.getValue()); - final EncodingType encoding = getEncodingType(context.getProperty(ENCODING).getValue()); - final boolean singleLineOutput = context.getProperty(LINE_OUTPUT_MODE).getValue().equals(LineOutputMode.SINGLE_LINE.getValue()); + final boolean encode = context.getProperty(MODE).asAllowableValue(EncodingMode.class).equals(EncodingMode.ENCODE); + final EncodingType encoding = context.getProperty(ENCODING).asAllowableValue(EncodingType.class); + final boolean singleLineOutput = context.getProperty(LINE_OUTPUT_MODE).asAllowableValue(LineOutputMode.class).equals(LineOutputMode.SINGLE_LINE); final int lineLength = singleLineOutput ? -1 : context.getProperty(ENCODED_LINE_LENGTH).evaluateAttributeExpressions(flowFile).asInteger(); final StreamCallback callback = getStreamCallback(encode, encoding, lineLength); @@ -279,14 +282,4 @@ public class EncodeContent extends AbstractProcessor { out.flush(); } } - - private static EncodingType getEncodingType(final String encodingTypeValue) { - if (EncodingType.BASE64.getValue().equals(encodingTypeValue)) { - return EncodingType.BASE64; - } else if (EncodingType.BASE32.getValue().equals(encodingTypeValue)) { - return EncodingType.BASE32; - } else { - return EncodingType.HEXADECIMAL; - } - } } diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EnforceOrder.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EnforceOrder.java index 588ca0f7c6..15603c1477 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EnforceOrder.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EnforceOrder.java @@ -43,10 +43,8 @@ import 
org.apache.nifi.processor.util.StandardValidators; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import java.util.Comparator; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -55,7 +53,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Function; -import java.util.stream.Collectors; import static org.apache.commons.lang3.StringUtils.isBlank; @@ -181,6 +178,16 @@ public class EnforceOrder extends AbstractProcessor { .expressionLanguageSupported(ExpressionLanguageScope.NONE) .build(); + private static final List PROPERTIES = List.of( + GROUP_IDENTIFIER, + ORDER_ATTRIBUTE, + INITIAL_ORDER, + MAX_ORDER, + BATCH_COUNT, + WAIT_TIMEOUT, + INACTIVE_TIMEOUT + ); + public static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") .description("A FlowFile with a matching order number will be routed to this relationship.") @@ -206,37 +213,24 @@ public class EnforceOrder extends AbstractProcessor { .description("A FlowFile that has an order younger than current, which means arrived too late and skipped, will be routed to this relationship.") .build(); - private final Set relationships; - - public EnforceOrder() { - final Set rels = new HashSet<>(); - rels.add(REL_SUCCESS); - rels.add(REL_WAIT); - rels.add(REL_OVERTOOK); - rels.add(REL_FAILURE); - rels.add(REL_SKIPPED); - relationships = Collections.unmodifiableSet(rels); - } + private static final Set RELATIONSHIPS = Set.of( + REL_SUCCESS, + REL_WAIT, + REL_OVERTOOK, + REL_FAILURE, + REL_SKIPPED + ); @Override protected List getSupportedPropertyDescriptors() { - final List descriptors = new ArrayList<>(); - descriptors.add(GROUP_IDENTIFIER); - descriptors.add(ORDER_ATTRIBUTE); - descriptors.add(INITIAL_ORDER); - descriptors.add(MAX_ORDER); - 
descriptors.add(BATCH_COUNT); - descriptors.add(WAIT_TIMEOUT); - descriptors.add(INACTIVE_TIMEOUT); - return descriptors; + return PROPERTIES; } @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } - @Override protected Collection customValidate(ValidationContext validationContext) { final List results = new ArrayList<>(super.customValidate(validationContext)); @@ -445,53 +439,55 @@ public class EnforceOrder extends AbstractProcessor { } private void transferFlowFiles() { - flowFileGroups.entrySet().stream().filter(entry -> !entry.getValue().isEmpty()).map(entry -> { - // Sort flow files within each group. - final List groupedFlowFiles = entry.getValue(); - groupedFlowFiles.sort(Comparator.comparing(getOrder)); - return entry; - }).forEach(entry -> { - // Check current state. - final String groupId = entry.getKey(); - final String stateKeyOrder = STATE_TARGET_ORDER.apply(groupId); - final int previousTargetOrder = Integer.parseInt(groupStates.get(stateKeyOrder)); - final AtomicInteger targetOrder = new AtomicInteger(previousTargetOrder); - final List groupedFlowFiles = entry.getValue(); - final String maxOrderStr = groupStates.get(STATE_MAX_ORDER.apply(groupId)); + flowFileGroups.entrySet().stream() + .filter(entry -> !entry.getValue().isEmpty()) + .peek(entry -> { + // Sort flow files within each group. + final List groupedFlowFiles = entry.getValue(); + groupedFlowFiles.sort(Comparator.comparing(getOrder)); + }) + .forEach(entry -> { + // Check current state. 
+ final String groupId = entry.getKey(); + final String stateKeyOrder = STATE_TARGET_ORDER.apply(groupId); + final int previousTargetOrder = Integer.parseInt(groupStates.get(stateKeyOrder)); + final AtomicInteger targetOrder = new AtomicInteger(previousTargetOrder); + final List groupedFlowFiles = entry.getValue(); + final String maxOrderStr = groupStates.get(STATE_MAX_ORDER.apply(groupId)); - groupedFlowFiles.forEach(f -> { - final Integer order = getOrder.apply(f); - final boolean isMaxOrder = !isBlank(maxOrderStr) && order.equals(Integer.parseInt(maxOrderStr)); + groupedFlowFiles.forEach(f -> { + final Integer order = getOrder.apply(f); + final boolean isMaxOrder = !isBlank(maxOrderStr) && order.equals(Integer.parseInt(maxOrderStr)); - if (order == targetOrder.get()) { - transferResult(f, REL_SUCCESS, null, null); - if (!isMaxOrder) { - // If max order is specified and this FlowFile has the max order, don't increment target anymore. - targetOrder.incrementAndGet(); + if (order == targetOrder.get()) { + transferResult(f, REL_SUCCESS, null, null); + if (!isMaxOrder) { + // If max order is specified and this FlowFile has the max order, don't increment target anymore. + targetOrder.incrementAndGet(); + } + + } else if (order > targetOrder.get()) { + + if (now - Long.parseLong(f.getAttribute(ATTR_STARTED_AT)) > waitTimeoutMillis) { + transferResult(f, REL_OVERTOOK, null, targetOrder.get()); + targetOrder.set(isMaxOrder ? order : order + 1); + } else { + transferResult(f, REL_WAIT, null, targetOrder.get()); + } + + } else { + final String msg = String.format("Skipped, FlowFile order was %d but current target is %d", order, targetOrder.get()); + logger.warn("{}. 
{}", msg, f); + transferResult(f, REL_SKIPPED, msg, targetOrder.get()); + } + + }); + + if (previousTargetOrder != targetOrder.get()) { + groupStates.put(stateKeyOrder, String.valueOf(targetOrder.get())); + groupStates.put(STATE_UPDATED_AT.apply(groupId), String.valueOf(now)); } - - } else if (order > targetOrder.get()) { - - if (now - Long.parseLong(f.getAttribute(ATTR_STARTED_AT)) > waitTimeoutMillis) { - transferResult(f, REL_OVERTOOK, null, targetOrder.get()); - targetOrder.set(isMaxOrder ? order : order + 1); - } else { - transferResult(f, REL_WAIT, null, targetOrder.get()); - } - - } else { - final String msg = String.format("Skipped, FlowFile order was %d but current target is %d", order, targetOrder.get()); - logger.warn("{}. {}", msg, f); - transferResult(f, REL_SKIPPED, msg, targetOrder.get()); - } - - }); - - if (previousTargetOrder != targetOrder.get()) { - groupStates.put(stateKeyOrder, String.valueOf(targetOrder.get())); - groupStates.put(STATE_UPDATED_AT.apply(groupId), String.valueOf(now)); - } - }); + }); } private void transferResult(final FlowFile flowFile, final Relationship result, final String detail, final Integer expectedOrder) { @@ -533,7 +529,7 @@ public class EnforceOrder extends AbstractProcessor { final List inactiveGroups = groupStates.keySet().stream() .filter(k -> k.endsWith(STATE_SUFFIX_UPDATED_AT) && (now - Long.parseLong(groupStates.get(k)) > inactiveTimeout)) .map(k -> k.substring(0, k.length() - STATE_SUFFIX_UPDATED_AT.length())) - .collect(Collectors.toList()); + .toList(); inactiveGroups.forEach(groupId -> { groupStates.remove(STATE_TARGET_ORDER.apply(groupId)); groupStates.remove(STATE_UPDATED_AT.apply(groupId)); diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateJsonPath.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateJsonPath.java index 
11b466eda2..ab5f9445b0 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateJsonPath.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateJsonPath.java @@ -41,7 +41,6 @@ import org.apache.nifi.logging.ComponentLog; import org.apache.nifi.processor.DataUnit; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; @@ -52,7 +51,6 @@ import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Queue; @@ -126,6 +124,14 @@ public class EvaluateJsonPath extends AbstractJsonPathProcessor { .dependsOn(DESTINATION, DESTINATION_ATTRIBUTE) .build(); + private static final List PROPERTIES = List.of( + DESTINATION, + RETURN_TYPE, + PATH_NOT_FOUND, + NULL_VALUE_DEFAULT_REPRESENTATION, + MAX_STRING_LENGTH + ); + public static final Relationship REL_MATCH = new Relationship.Builder() .name("matched") .description("FlowFiles are routed to this relationship when the JsonPath is successfully evaluated and the FlowFile is modified as a result") @@ -140,8 +146,11 @@ public class EvaluateJsonPath extends AbstractJsonPathProcessor { + "FlowFile; for instance, if the FlowFile is not valid JSON") .build(); - private Set relationships; - private List properties; + private static final Set RELATIONSHIPS = Set.of( + REL_MATCH, + REL_NO_MATCH, + REL_FAILURE + ); private final ConcurrentMap cachedJsonPathMap = new ConcurrentHashMap<>(); @@ -152,23 +161,6 @@ public class EvaluateJsonPath extends AbstractJsonPathProcessor { private volatile String nullDefaultValue; private 
volatile Configuration jsonPathConfiguration; - @Override - protected void init(final ProcessorInitializationContext context) { - final Set rels = new HashSet<>(); - rels.add(REL_MATCH); - rels.add(REL_NO_MATCH); - rels.add(REL_FAILURE); - this.relationships = Collections.unmodifiableSet(rels); - - final List props = new ArrayList<>(); - props.add(DESTINATION); - props.add(RETURN_TYPE); - props.add(PATH_NOT_FOUND); - props.add(NULL_VALUE_DEFAULT_REPRESENTATION); - props.add(MAX_STRING_LENGTH); - this.properties = Collections.unmodifiableList(props); - } - @Override protected Collection customValidate(final ValidationContext context) { final List results = new ArrayList<>(super.customValidate(context)); @@ -194,12 +186,12 @@ public class EvaluateJsonPath extends AbstractJsonPathProcessor { @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXPath.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXPath.java index 17f889a4a9..aacfa9bb2f 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXPath.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXPath.java @@ -16,33 +16,8 @@ */ package org.apache.nifi.processors.standard; -import static javax.xml.xpath.XPathConstants.NODESET; -import static javax.xml.xpath.XPathConstants.STRING; - -import java.io.BufferedInputStream; -import java.io.BufferedOutputStream; -import java.io.ByteArrayOutputStream; -import java.io.InputStream; -import java.io.OutputStream; -import 
java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.atomic.AtomicReference; - -import javax.xml.namespace.QName; -import javax.xml.transform.Source; -import javax.xml.transform.dom.DOMSource; -import javax.xml.transform.stream.StreamResult; -import javax.xml.xpath.XPathExpression; -import javax.xml.xpath.XPathExpressionException; -import javax.xml.xpath.XPathFactory; - +import net.sf.saxon.xpath.XPathEvaluator; +import net.sf.saxon.xpath.XPathFactoryImpl; import org.apache.nifi.annotation.behavior.DynamicProperty; import org.apache.nifi.annotation.behavior.InputRequirement; import org.apache.nifi.annotation.behavior.InputRequirement.Requirement; @@ -65,7 +40,6 @@ import org.apache.nifi.logging.ComponentLog; import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processors.standard.xml.DocumentTypeAllowedDocumentProvider; @@ -73,12 +47,33 @@ import org.apache.nifi.xml.processing.ProcessingException; import org.apache.nifi.xml.processing.parsers.StandardDocumentProvider; import org.apache.nifi.xml.processing.transform.StandardTransformProvider; import org.w3c.dom.Document; - -import net.sf.saxon.xpath.XPathEvaluator; -import net.sf.saxon.xpath.XPathFactoryImpl; import org.w3c.dom.Node; import org.w3c.dom.NodeList; +import javax.xml.namespace.QName; +import javax.xml.transform.Source; +import javax.xml.transform.dom.DOMSource; +import javax.xml.transform.stream.StreamResult; +import javax.xml.xpath.XPathExpression; +import 
javax.xml.xpath.XPathExpressionException; +import javax.xml.xpath.XPathFactory; +import java.io.BufferedInputStream; +import java.io.BufferedOutputStream; +import java.io.ByteArrayOutputStream; +import java.io.InputStream; +import java.io.OutputStream; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.atomic.AtomicReference; + +import static javax.xml.xpath.XPathConstants.NODESET; +import static javax.xml.xpath.XPathConstants.STRING; + @SideEffectFree @SupportsBatching @Tags({"XML", "evaluate", "XPath"}) @@ -136,6 +131,12 @@ public class EvaluateXPath extends AbstractProcessor { .defaultValue("false") .build(); + private static final List PROPERTIES = List.of( + DESTINATION, + RETURN_TYPE, + VALIDATE_DTD + ); + public static final Relationship REL_MATCH = new Relationship.Builder() .name("matched") .description("FlowFiles are routed to this relationship " @@ -153,26 +154,14 @@ public class EvaluateXPath extends AbstractProcessor { + "Type is 'nodeset' and the XPath evaluates to multiple nodes") .build(); - private Set relationships; - private List properties; + private static final Set RELATIONSHIPS = Set.of( + REL_MATCH, + REL_NO_MATCH, + REL_FAILURE + ); private final AtomicReference factoryRef = new AtomicReference<>(); - @Override - protected void init(final ProcessorInitializationContext context) { - final Set relationships = new HashSet<>(); - relationships.add(REL_MATCH); - relationships.add(REL_NO_MATCH); - relationships.add(REL_FAILURE); - this.relationships = Collections.unmodifiableSet(relationships); - - final List properties = new ArrayList<>(); - properties.add(DESTINATION); - properties.add(RETURN_TYPE); - properties.add(VALIDATE_DTD); - this.properties = Collections.unmodifiableList(properties); - } - @Override protected Collection customValidate(final ValidationContext 
context) { final List results = new ArrayList<>(super.customValidate(context)); @@ -198,12 +187,12 @@ public class EvaluateXPath extends AbstractProcessor { @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @OnScheduled @@ -330,7 +319,7 @@ public class EvaluateXPath extends AbstractProcessor { ByteArrayOutputStream baos = new ByteArrayOutputStream(); final StreamResult streamResult = new StreamResult(baos); transformProvider.transform(sourceNode, streamResult); - xpathResults.put(entry.getKey(), new String(baos.toByteArray(), StandardCharsets.UTF_8)); + xpathResults.put(entry.getKey(), baos.toString(StandardCharsets.UTF_8)); } catch (final ProcessingException e) { error.set(e); } diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXQuery.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXQuery.java index 07fbdf2717..89c0e7eb7d 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXQuery.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/EvaluateXQuery.java @@ -16,24 +16,6 @@ */ package org.apache.nifi.processors.standard; -import java.io.BufferedInputStream; -import java.io.BufferedOutputStream; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import 
java.util.concurrent.atomic.AtomicReference; -import javax.xml.transform.dom.DOMSource; -import javax.xml.transform.stream.StreamResult; import net.sf.saxon.s9api.Processor; import net.sf.saxon.s9api.SaxonApiException; import net.sf.saxon.s9api.XQueryCompiler; @@ -63,7 +45,6 @@ import org.apache.nifi.logging.ComponentLog; import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; @@ -73,6 +54,23 @@ import org.apache.nifi.xml.processing.parsers.StandardDocumentProvider; import org.apache.nifi.xml.processing.transform.StandardTransformProvider; import org.w3c.dom.Document; +import javax.xml.transform.dom.DOMSource; +import javax.xml.transform.stream.StreamResult; +import java.io.BufferedInputStream; +import java.io.BufferedOutputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.atomic.AtomicReference; + @SideEffectFree @SupportsBatching @Tags({"XML", "evaluate", "XPath", "XQuery"}) @@ -151,6 +149,14 @@ public class EvaluateXQuery extends AbstractProcessor { .defaultValue("false") .build(); + private static final List PROPERTIES = List.of( + DESTINATION, + XML_OUTPUT_METHOD, + XML_OUTPUT_OMIT_XML_DECLARATION, + XML_OUTPUT_INDENT, + VALIDATE_DTD + ); + public static final Relationship REL_MATCH = new Relationship.Builder() .name("matched") .description("FlowFiles are routed to this relationship when the XQuery is successfully evaluated and the 
FlowFile " @@ -169,25 +175,11 @@ public class EvaluateXQuery extends AbstractProcessor { + "the FlowFile.") .build(); - private Set relationships; - private List properties; - - @Override - protected void init(final ProcessorInitializationContext context) { - final Set relationships = new HashSet<>(); - relationships.add(REL_MATCH); - relationships.add(REL_NO_MATCH); - relationships.add(REL_FAILURE); - this.relationships = Collections.unmodifiableSet(relationships); - - final List properties = new ArrayList<>(); - properties.add(DESTINATION); - properties.add(XML_OUTPUT_METHOD); - properties.add(XML_OUTPUT_OMIT_XML_DECLARATION); - properties.add(XML_OUTPUT_INDENT); - properties.add(VALIDATE_DTD); - this.properties = Collections.unmodifiableList(properties); - } + private static final Set RELATIONSHIPS = Set.of( + REL_MATCH, + REL_NO_MATCH, + REL_FAILURE + ); @Override protected Collection customValidate(final ValidationContext context) { @@ -211,12 +203,12 @@ public class EvaluateXQuery extends AbstractProcessor { @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @Override @@ -303,7 +295,7 @@ public class EvaluateXQuery extends AbstractProcessor { xQueryResults.put(attributeName, value); } } else { // if (DESTINATION_CONTENT.equals(destination)){ - if (result.size() == 0) { + if (result.isEmpty()) { logger.info("No XQuery results found {}", flowFile); session.transfer(flowFile, REL_NO_MATCH); continue flowFileLoop; diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteProcess.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteProcess.java index 7a86fb766a..73f529b805 100644 --- 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteProcess.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteProcess.java @@ -53,7 +53,6 @@ import java.io.InputStreamReader; import java.io.OutputStream; import java.nio.charset.StandardCharsets; import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -156,11 +155,23 @@ public class ExecuteProcess extends AbstractProcessor { .addValidator(StandardValidators.NON_EMPTY_VALIDATOR) .build(); + private static final List PROPERTIES = List.of( + COMMAND, + COMMAND_ARGUMENTS, + BATCH_DURATION, + REDIRECT_ERROR_STREAM, + WORKING_DIR, + ARG_DELIMITER, + MIME_TYPE + ); + public static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") .description("All created FlowFiles are routed to this relationship") .build(); + private static final Set RELATIONSHIPS = Set.of(REL_SUCCESS); + private volatile Process externalProcess; private volatile ExecutorService executor; @@ -170,20 +181,12 @@ public class ExecuteProcess extends AbstractProcessor { @Override public Set getRelationships() { - return Collections.singleton(REL_SUCCESS); + return RELATIONSHIPS; } @Override protected List getSupportedPropertyDescriptors() { - final List properties = new ArrayList<>(); - properties.add(COMMAND); - properties.add(COMMAND_ARGUMENTS); - properties.add(BATCH_DURATION); - properties.add(REDIRECT_ERROR_STREAM); - properties.add(WORKING_DIR); - properties.add(ARG_DELIMITER); - properties.add(MIME_TYPE); - return properties; + return PROPERTIES; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteSQL.java 
b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteSQL.java index dcc4fd0008..fb806eaed3 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteSQL.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteSQL.java @@ -33,15 +33,11 @@ import org.apache.nifi.expression.ExpressionLanguageScope; import org.apache.nifi.flowfile.FlowFile; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.util.StandardValidators; import org.apache.nifi.processors.standard.sql.DefaultAvroSqlWriter; import org.apache.nifi.processors.standard.sql.SqlWriter; import org.apache.nifi.util.db.JdbcCommon; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashSet; import java.util.List; import java.util.Set; @@ -147,27 +143,26 @@ public class ExecuteSQL extends AbstractExecuteSQL { .build(); public ExecuteSQL() { - final Set r = new HashSet<>(); - r.add(REL_SUCCESS); - r.add(REL_FAILURE); - relationships = Collections.unmodifiableSet(r); - - final List pds = new ArrayList<>(); - pds.add(DBCP_SERVICE); - pds.add(SQL_PRE_QUERY); - pds.add(SQL_SELECT_QUERY); - pds.add(SQL_POST_QUERY); - pds.add(QUERY_TIMEOUT); - pds.add(NORMALIZE_NAMES_FOR_AVRO); - pds.add(USE_AVRO_LOGICAL_TYPES); - pds.add(COMPRESSION_FORMAT); - pds.add(DEFAULT_PRECISION); - pds.add(DEFAULT_SCALE); - pds.add(MAX_ROWS_PER_FLOW_FILE); - pds.add(OUTPUT_BATCH_SIZE); - pds.add(FETCH_SIZE); - pds.add(AUTO_COMMIT); - propDescriptors = Collections.unmodifiableList(pds); + relationships = Set.of( + REL_SUCCESS, + REL_FAILURE + ); + propDescriptors = List.of( + DBCP_SERVICE, + SQL_PRE_QUERY, + SQL_SELECT_QUERY, + SQL_POST_QUERY, + QUERY_TIMEOUT, + 
NORMALIZE_NAMES_FOR_AVRO, + USE_AVRO_LOGICAL_TYPES, + COMPRESSION_FORMAT, + DEFAULT_PRECISION, + DEFAULT_SCALE, + MAX_ROWS_PER_FLOW_FILE, + OUTPUT_BATCH_SIZE, + FETCH_SIZE, + AUTO_COMMIT + ); } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteSQLRecord.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteSQLRecord.java index bb255e6ab3..7e802d2bfc 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteSQLRecord.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteSQLRecord.java @@ -39,9 +39,7 @@ import org.apache.nifi.processors.standard.sql.SqlWriter; import org.apache.nifi.serialization.RecordSetWriterFactory; import org.apache.nifi.util.db.JdbcCommon; -import java.util.ArrayList; import java.util.Collections; -import java.util.HashSet; import java.util.List; import java.util.Set; @@ -152,28 +150,31 @@ public class ExecuteSQLRecord extends AbstractExecuteSQL { .required(true) .build(); - public ExecuteSQLRecord() { - final Set r = new HashSet<>(); - r.add(REL_SUCCESS); - r.add(REL_FAILURE); - relationships = Collections.unmodifiableSet(r); + private static final List PROPERTIES = List.of( + DBCP_SERVICE, + SQL_PRE_QUERY, + SQL_SELECT_QUERY, + SQL_POST_QUERY, + QUERY_TIMEOUT, + RECORD_WRITER_FACTORY, + NORMALIZE_NAMES, + USE_AVRO_LOGICAL_TYPES, + DEFAULT_PRECISION, + DEFAULT_SCALE, + MAX_ROWS_PER_FLOW_FILE, + OUTPUT_BATCH_SIZE, + FETCH_SIZE, + AUTO_COMMIT + ); - final List pds = new ArrayList<>(); - pds.add(DBCP_SERVICE); - pds.add(SQL_PRE_QUERY); - pds.add(SQL_SELECT_QUERY); - pds.add(SQL_POST_QUERY); - pds.add(QUERY_TIMEOUT); - pds.add(RECORD_WRITER_FACTORY); - pds.add(NORMALIZE_NAMES); - pds.add(USE_AVRO_LOGICAL_TYPES); 
- pds.add(DEFAULT_PRECISION); - pds.add(DEFAULT_SCALE); - pds.add(MAX_ROWS_PER_FLOW_FILE); - pds.add(OUTPUT_BATCH_SIZE); - pds.add(FETCH_SIZE); - pds.add(AUTO_COMMIT); - propDescriptors = Collections.unmodifiableList(pds); + private static final Set RELATIONSHIPS = Set.of( + REL_SUCCESS, + REL_FAILURE + ); + + public ExecuteSQLRecord() { + relationships = RELATIONSHIPS; + propDescriptors = PROPERTIES; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteStreamCommand.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteStreamCommand.java index 5f293fcad1..3d9fe9c7cd 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteStreamCommand.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExecuteStreamCommand.java @@ -20,16 +20,15 @@ import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.apache.nifi.annotation.behavior.DynamicProperties; -import org.apache.nifi.annotation.behavior.Restricted; -import org.apache.nifi.annotation.behavior.Restriction; - import org.apache.nifi.annotation.behavior.DynamicProperty; -import org.apache.nifi.annotation.behavior.WritesAttributes; -import org.apache.nifi.annotation.behavior.WritesAttribute; -import org.apache.nifi.annotation.behavior.SupportsSensitiveDynamicProperties; import org.apache.nifi.annotation.behavior.InputRequirement; import org.apache.nifi.annotation.behavior.InputRequirement.Requirement; +import org.apache.nifi.annotation.behavior.Restricted; +import org.apache.nifi.annotation.behavior.Restriction; import org.apache.nifi.annotation.behavior.SupportsBatching; +import 
org.apache.nifi.annotation.behavior.SupportsSensitiveDynamicProperties; +import org.apache.nifi.annotation.behavior.WritesAttribute; +import org.apache.nifi.annotation.behavior.WritesAttributes; import org.apache.nifi.annotation.documentation.CapabilityDescription; import org.apache.nifi.annotation.documentation.Tags; import org.apache.nifi.components.AllowableValue; @@ -66,9 +65,7 @@ import java.io.OutputStream; import java.lang.ProcessBuilder.Redirect; import java.nio.charset.Charset; import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -188,8 +185,12 @@ public class ExecuteStreamCommand extends AbstractProcessor { .build(); private final AtomicReference> relationships = new AtomicReference<>(); - private final static Set OUTPUT_STREAM_RELATIONSHIP_SET; - private final static Set ATTRIBUTE_RELATIONSHIP_SET; + private final static Set OUTPUT_STREAM_RELATIONSHIP_SET = Set.of( + OUTPUT_STREAM_RELATIONSHIP, + ORIGINAL_RELATIONSHIP, + NONZERO_STATUS_RELATIONSHIP + ); + private final static Set ATTRIBUTE_RELATIONSHIP_SET = Set.of(ORIGINAL_RELATIONSHIP); private static final Pattern COMMAND_ARGUMENT_PATTERN = Pattern.compile("command\\.argument\\.(?[0-9]+)$"); @@ -286,33 +287,20 @@ public class ExecuteStreamCommand extends AbstractProcessor { .addValidator(StandardValidators.NON_EMPTY_VALIDATOR) .build(); - private static final List PROPERTIES; + private static final List PROPERTIES = List.of( + WORKING_DIR, + EXECUTION_COMMAND, + ARGUMENTS_STRATEGY, + EXECUTION_ARGUMENTS, + ARG_DELIMITER, + IGNORE_STDIN, + PUT_OUTPUT_IN_ATTRIBUTE, + PUT_ATTRIBUTE_MAX_LENGTH, + MIME_TYPE + ); + private static final String MASKED_ARGUMENT = "********"; - static { - List props = new ArrayList<>(); - props.add(WORKING_DIR); - props.add(EXECUTION_COMMAND); - props.add(ARGUMENTS_STRATEGY); - props.add(EXECUTION_ARGUMENTS); - props.add(ARG_DELIMITER); - 
props.add(IGNORE_STDIN); - props.add(PUT_OUTPUT_IN_ATTRIBUTE); - props.add(PUT_ATTRIBUTE_MAX_LENGTH); - props.add(MIME_TYPE); - PROPERTIES = Collections.unmodifiableList(props); - - Set outputStreamRelationships = new HashSet<>(); - outputStreamRelationships.add(OUTPUT_STREAM_RELATIONSHIP); - outputStreamRelationships.add(ORIGINAL_RELATIONSHIP); - outputStreamRelationships.add(NONZERO_STATUS_RELATIONSHIP); - OUTPUT_STREAM_RELATIONSHIP_SET = Collections.unmodifiableSet(outputStreamRelationships); - - Set attributeRelationships = new HashSet<>(); - attributeRelationships.add(ORIGINAL_RELATIONSHIP); - ATTRIBUTE_RELATIONSHIP_SET = Collections.unmodifiableSet(attributeRelationships); - } - private ComponentLog logger; @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractGrok.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractGrok.java index 1bacd0769c..0f989852b1 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractGrok.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractGrok.java @@ -56,9 +56,7 @@ import java.io.InputStream; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -156,6 +154,16 @@ public class ExtractGrok extends AbstractProcessor { .defaultValue("false") .build(); + private final static List PROPERTIES = List.of( + GROK_EXPRESSION, + GROK_PATTERNS, + DESTINATION, + CHARACTER_SET, + MAX_BUFFER_SIZE, + NAMED_CAPTURES_ONLY, + KEEP_EMPTY_CAPTURES + ); + public static final Relationship REL_MATCH = new Relationship.Builder() 
.name("matched") .description("FlowFiles are routed to this relationship when the Grok Expression is successfully evaluated and the FlowFile is modified as a result") @@ -166,39 +174,24 @@ public class ExtractGrok extends AbstractProcessor { .description("FlowFiles are routed to this relationship when no provided Grok Expression matches the content of the FlowFile") .build(); - private final static List descriptors; - private final static Set relationships; + private final static Set RELATIONSHIPS = Set.of( + REL_MATCH, + REL_NO_MATCH + ); private volatile Grok grok; private final BlockingQueue bufferQueue = new LinkedBlockingQueue<>(); private final AtomicBoolean keepEmptyCaputures = new AtomicBoolean(true); - static { - final Set _relationships = new HashSet<>(); - _relationships.add(REL_MATCH); - _relationships.add(REL_NO_MATCH); - relationships = Collections.unmodifiableSet(_relationships); - - final List _descriptors = new ArrayList<>(); - _descriptors.add(GROK_EXPRESSION); - _descriptors.add(GROK_PATTERNS); - _descriptors.add(DESTINATION); - _descriptors.add(CHARACTER_SET); - _descriptors.add(MAX_BUFFER_SIZE); - _descriptors.add(NAMED_CAPTURES_ONLY); - _descriptors.add(KEEP_EMPTY_CAPTURES); - descriptors = Collections.unmodifiableList(_descriptors); - } - @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override public final List getSupportedPropertyDescriptors() { - return descriptors; + return PROPERTIES; } @OnStopped diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractRecordSchema.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractRecordSchema.java index 8c7c670816..90e9b5edfd 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractRecordSchema.java +++ 
b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractRecordSchema.java @@ -42,8 +42,6 @@ import org.apache.nifi.serialization.RecordReaderFactory; import org.apache.nifi.serialization.record.RecordSchema; import java.io.InputStream; -import java.util.Arrays; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -79,6 +77,11 @@ public class ExtractRecordSchema extends AbstractProcessor { .required(true) .build(); + private static final List PROPERTIES = List.of( + RECORD_READER, + SCHEMA_CACHE_SIZE + ); + static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") .description("FlowFiles whose record schemas are successfully extracted will be routed to this relationship") @@ -89,21 +92,21 @@ public class ExtractRecordSchema extends AbstractProcessor { + "the FlowFile will be routed to this relationship") .build(); - static final List properties = Arrays.asList(RECORD_READER, SCHEMA_CACHE_SIZE); + private static final Set RELATIONSHIPS = Set.of( + REL_SUCCESS, + REL_FAILURE + ); private LoadingCache avroSchemaTextCache; @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @Override public Set getRelationships() { - final Set relationships = new HashSet<>(); - relationships.add(REL_SUCCESS); - relationships.add(REL_FAILURE); - return relationships; + return RELATIONSHIPS; } @OnScheduled diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractText.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractText.java index 2e2cb29b7c..5dee52ed9f 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractText.java +++ 
b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ExtractText.java @@ -231,6 +231,24 @@ public class ExtractText extends AbstractProcessor { .defaultValue("false") .build(); + private static final List PROPERTIES = List.of( + CHARACTER_SET, + MAX_BUFFER_SIZE, + MAX_CAPTURE_GROUP_LENGTH, + CANON_EQ, + CASE_INSENSITIVE, + COMMENTS, + DOTALL, + LITERAL, + MULTILINE, + UNICODE_CASE, + UNICODE_CHARACTER_CLASS, + UNIX_LINES, + INCLUDE_CAPTURE_GROUP_ZERO, + ENABLE_REPEATING_CAPTURE_GROUP, + ENABLE_NAMED_GROUPS + ); + public static final Relationship REL_MATCH = new Relationship.Builder() .name("matched") .description("FlowFiles are routed to this relationship when the Regular Expression is successfully evaluated and the FlowFile is modified as a result") @@ -241,24 +259,10 @@ public class ExtractText extends AbstractProcessor { .description("FlowFiles are routed to this relationship when no provided Regular Expression matches the content of the FlowFile") .build(); - private final Set relationships = Set.of(REL_MATCH, - REL_NO_MATCH); - - private final List properties = List.of(CHARACTER_SET, - MAX_BUFFER_SIZE, - MAX_CAPTURE_GROUP_LENGTH, - CANON_EQ, - CASE_INSENSITIVE, - COMMENTS, - DOTALL, - LITERAL, - MULTILINE, - UNICODE_CASE, - UNICODE_CHARACTER_CLASS, - UNIX_LINES, - INCLUDE_CAPTURE_GROUP_ZERO, - ENABLE_REPEATING_CAPTURE_GROUP, - ENABLE_NAMED_GROUPS); + private static final Set RELATIONSHIPS = Set.of( + REL_MATCH, + REL_NO_MATCH + ); private final BlockingQueue bufferQueue = new LinkedBlockingQueue<>(); private final AtomicReference> compiledPattersMapRef = new AtomicReference<>(); @@ -266,12 +270,12 @@ public class ExtractText extends AbstractProcessor { @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @Override @@ -481,7 +485,7 @@ public class 
ExtractText extends AbstractProcessor { } int getCompileFlags(ProcessContext context) { - int flags = (context.getProperty(UNIX_LINES).asBoolean() ? Pattern.UNIX_LINES : 0) + return (context.getProperty(UNIX_LINES).asBoolean() ? Pattern.UNIX_LINES : 0) | (context.getProperty(CASE_INSENSITIVE).asBoolean() ? Pattern.CASE_INSENSITIVE : 0) | (context.getProperty(COMMENTS).asBoolean() ? Pattern.COMMENTS : 0) | (context.getProperty(MULTILINE).asBoolean() ? Pattern.MULTILINE : 0) @@ -490,6 +494,5 @@ public class ExtractText extends AbstractProcessor { | (context.getProperty(UNICODE_CASE).asBoolean() ? Pattern.UNICODE_CASE : 0) | (context.getProperty(CANON_EQ).asBoolean() ? Pattern.CANON_EQ : 0) | (context.getProperty(UNICODE_CHARACTER_CLASS).asBoolean() ? Pattern.UNICODE_CHARACTER_CLASS : 0); - return flags; } } diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FetchDistributedMapCache.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FetchDistributedMapCache.java index 2e0c053662..c6038d520b 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FetchDistributedMapCache.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FetchDistributedMapCache.java @@ -34,8 +34,8 @@ import org.apache.nifi.distributed.cache.client.Serializer; import org.apache.nifi.distributed.cache.client.exception.DeserializationException; import org.apache.nifi.distributed.cache.client.exception.SerializationException; import org.apache.nifi.expression.AttributeExpression; -import org.apache.nifi.expression.ExpressionLanguageScope; import org.apache.nifi.expression.AttributeExpression.ResultType; +import org.apache.nifi.expression.ExpressionLanguageScope; import 
org.apache.nifi.flowfile.FlowFile; import org.apache.nifi.logging.ComponentLog; import org.apache.nifi.processor.AbstractProcessor; @@ -51,13 +51,11 @@ import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; -import java.util.stream.Collectors; @SupportsBatching @Tags({"map", "cache", "fetch", "distributed"}) @@ -75,14 +73,14 @@ import java.util.stream.Collectors; "org.apache.nifi.processors.standard.PutDistributedMapCache"}) public class FetchDistributedMapCache extends AbstractProcessor { - public static final PropertyDescriptor PROP_DISTRIBUTED_CACHE_SERVICE = new PropertyDescriptor.Builder() + public static final PropertyDescriptor DISTRIBUTED_CACHE_SERVICE = new PropertyDescriptor.Builder() .name("Distributed Cache Service") .description("The Controller Service that is used to get the cached values.") .required(true) .identifiesControllerService(DistributedMapCacheClient.class) .build(); - public static final PropertyDescriptor PROP_CACHE_ENTRY_IDENTIFIER = new PropertyDescriptor.Builder() + public static final PropertyDescriptor CACHE_ENTRY_IDENTIFIER = new PropertyDescriptor.Builder() .name("Cache Entry Identifier") .description("A comma-delimited list of FlowFile attributes, or the results of Attribute Expression Language statements, which will be evaluated " + "against a FlowFile in order to determine the value(s) used to identify duplicates; it is these values that are cached. 
NOTE: Only a single " @@ -94,7 +92,7 @@ public class FetchDistributedMapCache extends AbstractProcessor { .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) .build(); - public static final PropertyDescriptor PROP_PUT_CACHE_VALUE_IN_ATTRIBUTE = new PropertyDescriptor.Builder() + public static final PropertyDescriptor PUT_CACHE_VALUE_IN_ATTRIBUTE = new PropertyDescriptor.Builder() .name("Put Cache Value In Attribute") .description("If set, the cache value received will be put into an attribute of the FlowFile instead of a the content of the" + "FlowFile. The attribute key to put to is determined by evaluating value of this property. If multiple Cache Entry Identifiers are selected, " @@ -103,7 +101,7 @@ public class FetchDistributedMapCache extends AbstractProcessor { .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) .build(); - public static final PropertyDescriptor PROP_PUT_ATTRIBUTE_MAX_LENGTH = new PropertyDescriptor.Builder() + public static final PropertyDescriptor PUT_ATTRIBUTE_MAX_LENGTH = new PropertyDescriptor.Builder() .name("Max Length To Put In Attribute") .description("If routing the cache value to an attribute of the FlowFile (by setting the \"Put Cache Value in attribute\" " + "property), the number of characters put to the attribute value will be at most this amount. This is important because " @@ -113,7 +111,7 @@ public class FetchDistributedMapCache extends AbstractProcessor { .defaultValue("256") .build(); - public static final PropertyDescriptor PROP_CHARACTER_SET = new PropertyDescriptor.Builder() + public static final PropertyDescriptor CHARACTER_SET = new PropertyDescriptor.Builder() .name("Character Set") .description("The Character Set in which the cached value is encoded. 
This will only be used when routing to an attribute.") .required(false) @@ -121,6 +119,14 @@ public class FetchDistributedMapCache extends AbstractProcessor { .defaultValue("UTF-8") .build(); + private static final List PROPERTIES = List.of( + CACHE_ENTRY_IDENTIFIER, + DISTRIBUTED_CACHE_SERVICE, + PUT_CACHE_VALUE_IN_ATTRIBUTE, + PUT_ATTRIBUTE_MAX_LENGTH, + CHARACTER_SET + ); + public static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") .description("If the cache was successfully communicated with it will be routed to this relationship") @@ -133,40 +139,31 @@ public class FetchDistributedMapCache extends AbstractProcessor { .name("failure") .description("If unable to communicate with the cache or if the cache entry is evaluated to be blank, the FlowFile will be penalized and routed to this relationship") .build(); - private final Set relationships; + + private static final Set RELATIONSHIPS = Set.of( + REL_SUCCESS, + REL_NOT_FOUND, + REL_FAILURE + ); private final Serializer keySerializer = new StringSerializer(); private final Deserializer valueDeserializer = new CacheValueDeserializer(); - public FetchDistributedMapCache() { - final Set rels = new HashSet<>(); - rels.add(REL_SUCCESS); - rels.add(REL_NOT_FOUND); - rels.add(REL_FAILURE); - relationships = Collections.unmodifiableSet(rels); - } - @Override protected List getSupportedPropertyDescriptors() { - final List descriptors = new ArrayList<>(); - descriptors.add(PROP_CACHE_ENTRY_IDENTIFIER); - descriptors.add(PROP_DISTRIBUTED_CACHE_SERVICE); - descriptors.add(PROP_PUT_CACHE_VALUE_IN_ATTRIBUTE); - descriptors.add(PROP_PUT_ATTRIBUTE_MAX_LENGTH); - descriptors.add(PROP_CHARACTER_SET); - return descriptors; + return PROPERTIES; } @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override protected Collection customValidate(ValidationContext validationContext) { List results = new ArrayList<>(super.customValidate(validationContext)); - 
PropertyValue cacheEntryIdentifier = validationContext.getProperty(PROP_CACHE_ENTRY_IDENTIFIER); + PropertyValue cacheEntryIdentifier = validationContext.getProperty(CACHE_ENTRY_IDENTIFIER); boolean elPresent = false; try { elPresent = cacheEntryIdentifier.isExpressionLanguagePresent(); @@ -180,7 +177,7 @@ public class FetchDistributedMapCache extends AbstractProcessor { // or a single EL statement with commas inside it but that evaluates to a single item. results.add(new ValidationResult.Builder().valid(true).explanation("Contains Expression Language").build()); } else { - if (!validationContext.getProperty(FetchDistributedMapCache.PROP_PUT_CACHE_VALUE_IN_ATTRIBUTE).isSet()) { + if (!validationContext.getProperty(FetchDistributedMapCache.PUT_CACHE_VALUE_IN_ATTRIBUTE).isSet()) { String identifierString = cacheEntryIdentifier.getValue(); if (identifierString.contains(",")) { results.add(new ValidationResult.Builder().valid(false) @@ -199,7 +196,7 @@ public class FetchDistributedMapCache extends AbstractProcessor { } final ComponentLog logger = getLogger(); - final String cacheKey = context.getProperty(PROP_CACHE_ENTRY_IDENTIFIER).evaluateAttributeExpressions(flowFile).getValue(); + final String cacheKey = context.getProperty(CACHE_ENTRY_IDENTIFIER).evaluateAttributeExpressions(flowFile).getValue(); // This block retains the previous behavior when only one Cache Entry Identifier was allowed, so as not to change the expected error message if (StringUtils.isBlank(cacheKey)) { logger.error("FlowFile {} has no attribute for given Cache Entry Identifier", flowFile); @@ -207,7 +204,7 @@ public class FetchDistributedMapCache extends AbstractProcessor { session.transfer(flowFile, REL_FAILURE); return; } - List cacheKeys = Arrays.stream(cacheKey.split(",")).filter(path -> !StringUtils.isEmpty(path)).map(String::trim).collect(Collectors.toList()); + List cacheKeys = Arrays.stream(cacheKey.split(",")).filter(path -> !StringUtils.isEmpty(path)).map(String::trim).toList(); for 
(int i = 0; i < cacheKeys.size(); i++) { if (StringUtils.isBlank(cacheKeys.get(i))) { // Log first missing identifier, route to failure, and return @@ -218,14 +215,14 @@ public class FetchDistributedMapCache extends AbstractProcessor { } } - final DistributedMapCacheClient cache = context.getProperty(PROP_DISTRIBUTED_CACHE_SERVICE).asControllerService(DistributedMapCacheClient.class); + final DistributedMapCacheClient cache = context.getProperty(DISTRIBUTED_CACHE_SERVICE).asControllerService(DistributedMapCacheClient.class); try { final Map cacheValues; final boolean singleKey = cacheKeys.size() == 1; if (singleKey) { cacheValues = new HashMap<>(1); - cacheValues.put(cacheKeys.get(0), cache.get(cacheKey, keySerializer, valueDeserializer)); + cacheValues.put(cacheKeys.getFirst(), cache.get(cacheKey, keySerializer, valueDeserializer)); } else { cacheValues = cache.subMap(new HashSet<>(cacheKeys), keySerializer, valueDeserializer); } @@ -238,16 +235,16 @@ public class FetchDistributedMapCache extends AbstractProcessor { notFound = true; break; } else { - boolean putInAttribute = context.getProperty(PROP_PUT_CACHE_VALUE_IN_ATTRIBUTE).isSet(); + boolean putInAttribute = context.getProperty(PUT_CACHE_VALUE_IN_ATTRIBUTE).isSet(); if (putInAttribute) { - String attributeName = context.getProperty(PROP_PUT_CACHE_VALUE_IN_ATTRIBUTE).evaluateAttributeExpressions(flowFile).getValue(); + String attributeName = context.getProperty(PUT_CACHE_VALUE_IN_ATTRIBUTE).evaluateAttributeExpressions(flowFile).getValue(); if (!singleKey) { // Append key to attribute name if multiple keys attributeName += "." 
+ cacheValueEntry.getKey(); } - String attributeValue = new String(cacheValue, context.getProperty(PROP_CHARACTER_SET).getValue()); + String attributeValue = new String(cacheValue, context.getProperty(CHARACTER_SET).getValue()); - int maxLength = context.getProperty(PROP_PUT_ATTRIBUTE_MAX_LENGTH).asInteger(); + int maxLength = context.getProperty(PUT_ATTRIBUTE_MAX_LENGTH).asInteger(); if (maxLength < attributeValue.length()) { attributeValue = attributeValue.substring(0, maxLength); } diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FetchFTP.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FetchFTP.java index 3d1b6a25c8..12d4fb230f 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FetchFTP.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FetchFTP.java @@ -17,9 +17,6 @@ package org.apache.nifi.processors.standard; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; import org.apache.nifi.annotation.behavior.InputRequirement; import org.apache.nifi.annotation.behavior.InputRequirement.Requirement; import org.apache.nifi.annotation.behavior.WritesAttribute; @@ -34,8 +31,12 @@ import org.apache.nifi.components.ValidationContext; import org.apache.nifi.components.ValidationResult; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.util.file.transfer.FetchFileTransfer; -import org.apache.nifi.processors.standard.util.FTPTransfer; import org.apache.nifi.processor.util.file.transfer.FileTransfer; +import org.apache.nifi.processors.standard.util.FTPTransfer; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; // Note that we do not use @SupportsBatching 
annotation. This processor cannot support batching because it must ensure that session commits happen before remote files are deleted. @@ -83,34 +84,37 @@ import org.apache.nifi.processor.util.file.transfer.FileTransfer; ) public class FetchFTP extends FetchFileTransfer { + private static final PropertyDescriptor PORT = + new PropertyDescriptor.Builder().fromPropertyDescriptor(UNDEFAULTED_PORT).defaultValue("21").build(); + + private static final List PROPERTIES = List.of( + HOSTNAME, + PORT, + USERNAME, + FTPTransfer.PASSWORD, + REMOTE_FILENAME, + COMPLETION_STRATEGY, + MOVE_DESTINATION_DIR, + MOVE_CREATE_DIRECTORY, + FTPTransfer.CONNECTION_TIMEOUT, + FTPTransfer.DATA_TIMEOUT, + FTPTransfer.USE_COMPRESSION, + FTPTransfer.CONNECTION_MODE, + FTPTransfer.TRANSFER_MODE, + FTPTransfer.PROXY_CONFIGURATION_SERVICE, + FTPTransfer.PROXY_TYPE, + FTPTransfer.PROXY_HOST, + FTPTransfer.PROXY_PORT, + FTPTransfer.HTTP_PROXY_USERNAME, + FTPTransfer.HTTP_PROXY_PASSWORD, + FTPTransfer.BUFFER_SIZE, + FILE_NOT_FOUND_LOG_LEVEL, + FTPTransfer.UTF8_ENCODING + ); + @Override protected List getSupportedPropertyDescriptors() { - final PropertyDescriptor port = new PropertyDescriptor.Builder().fromPropertyDescriptor(UNDEFAULTED_PORT).defaultValue("21").build(); - - final List properties = new ArrayList<>(); - properties.add(HOSTNAME); - properties.add(port); - properties.add(USERNAME); - properties.add(FTPTransfer.PASSWORD); - properties.add(REMOTE_FILENAME); - properties.add(COMPLETION_STRATEGY); - properties.add(MOVE_DESTINATION_DIR); - properties.add(MOVE_CREATE_DIRECTORY); - properties.add(FTPTransfer.CONNECTION_TIMEOUT); - properties.add(FTPTransfer.DATA_TIMEOUT); - properties.add(FTPTransfer.USE_COMPRESSION); - properties.add(FTPTransfer.CONNECTION_MODE); - properties.add(FTPTransfer.TRANSFER_MODE); - properties.add(FTPTransfer.PROXY_CONFIGURATION_SERVICE); - properties.add(FTPTransfer.PROXY_TYPE); - properties.add(FTPTransfer.PROXY_HOST); - properties.add(FTPTransfer.PROXY_PORT); - 
properties.add(FTPTransfer.HTTP_PROXY_USERNAME); - properties.add(FTPTransfer.HTTP_PROXY_PASSWORD); - properties.add(FTPTransfer.BUFFER_SIZE); - properties.add(FILE_NOT_FOUND_LOG_LEVEL); - properties.add(FTPTransfer.UTF8_ENCODING); - return properties; + return PROPERTIES; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FetchFile.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FetchFile.java index f9f75b043f..e16aaf7aa3 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FetchFile.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FetchFile.java @@ -53,7 +53,6 @@ import java.nio.file.Path; import java.nio.file.StandardCopyOption; import java.util.ArrayList; import java.util.Collection; -import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.UUID; @@ -177,6 +176,15 @@ public class FetchFile extends AbstractProcessor { .required(true) .build(); + private static final List PROPERTIES = List.of( + FILENAME, + COMPLETION_STRATEGY, + MOVE_DESTINATION_DIR, + CONFLICT_STRATEGY, + FILE_NOT_FOUND_LOG_LEVEL, + PERM_DENIED_LOG_LEVEL + ); + static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") .description("Any FlowFile that is successfully fetched from the file system will be transferred to this Relationship.") @@ -195,26 +203,21 @@ public class FetchFile extends AbstractProcessor { "Any FlowFile that could not be fetched from the file system for any reason other than insufficient permissions or the file not existing will be transferred to this Relationship.") .build(); + private static final Set RELATIONSHIPS = Set.of( + REL_SUCCESS, + REL_NOT_FOUND, + REL_PERMISSION_DENIED, + 
REL_FAILURE + ); + @Override protected List getSupportedPropertyDescriptors() { - final List properties = new ArrayList<>(); - properties.add(FILENAME); - properties.add(COMPLETION_STRATEGY); - properties.add(MOVE_DESTINATION_DIR); - properties.add(CONFLICT_STRATEGY); - properties.add(FILE_NOT_FOUND_LOG_LEVEL); - properties.add(PERM_DENIED_LOG_LEVEL); - return properties; + return PROPERTIES; } @Override public Set getRelationships() { - final Set relationships = new HashSet<>(); - relationships.add(REL_SUCCESS); - relationships.add(REL_NOT_FOUND); - relationships.add(REL_PERMISSION_DENIED); - relationships.add(REL_FAILURE); - return relationships; + return RELATIONSHIPS; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FetchSFTP.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FetchSFTP.java index 7ab3a16e25..2ccca3a8be 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FetchSFTP.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FetchSFTP.java @@ -17,9 +17,6 @@ package org.apache.nifi.processors.standard; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; import org.apache.nifi.annotation.behavior.InputRequirement; import org.apache.nifi.annotation.behavior.InputRequirement.Requirement; import org.apache.nifi.annotation.behavior.WritesAttribute; @@ -34,10 +31,14 @@ import org.apache.nifi.components.ValidationContext; import org.apache.nifi.components.ValidationResult; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.util.file.transfer.FetchFileTransfer; -import org.apache.nifi.processors.standard.util.FTPTransfer; import 
org.apache.nifi.processor.util.file.transfer.FileTransfer; +import org.apache.nifi.processors.standard.util.FTPTransfer; import org.apache.nifi.processors.standard.util.SFTPTransfer; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + // Note that we do not use @SupportsBatching annotation. This processor cannot support batching because it must ensure that session commits happen before remote files are deleted. @InputRequirement(Requirement.INPUT_REQUIRED) @Tags({"sftp", "get", "retrieve", "files", "fetch", "remote", "ingest", "source", "input"}) @@ -83,48 +84,52 @@ import org.apache.nifi.processors.standard.util.SFTPTransfer; ) public class FetchSFTP extends FetchFileTransfer { + private static final PropertyDescriptor PORT = + new PropertyDescriptor.Builder().fromPropertyDescriptor(UNDEFAULTED_PORT).defaultValue("22").build(); + + private static final PropertyDescriptor DISABLE_DIRECTORY_LISTING = new PropertyDescriptor.Builder() + .fromPropertyDescriptor(SFTPTransfer.DISABLE_DIRECTORY_LISTING) + .description(String.format("Control how '%s' is created when '%s' is '%s' and '%s' is enabled. 
%s", + MOVE_DESTINATION_DIR.getDisplayName(), + COMPLETION_STRATEGY.getDisplayName(), + COMPLETION_MOVE.getDisplayName(), + MOVE_CREATE_DIRECTORY.getDisplayName(), + SFTPTransfer.DISABLE_DIRECTORY_LISTING.getDescription())).build(); + + private static final List PROPERTIES = List.of( + HOSTNAME, + PORT, + USERNAME, + SFTPTransfer.PASSWORD, + SFTPTransfer.PRIVATE_KEY_PATH, + SFTPTransfer.PRIVATE_KEY_PASSPHRASE, + REMOTE_FILENAME, + COMPLETION_STRATEGY, + MOVE_DESTINATION_DIR, + MOVE_CREATE_DIRECTORY, + DISABLE_DIRECTORY_LISTING, + SFTPTransfer.CONNECTION_TIMEOUT, + SFTPTransfer.DATA_TIMEOUT, + SFTPTransfer.USE_KEEPALIVE_ON_TIMEOUT, + SFTPTransfer.HOST_KEY_FILE, + SFTPTransfer.STRICT_HOST_KEY_CHECKING, + SFTPTransfer.USE_COMPRESSION, + SFTPTransfer.PROXY_CONFIGURATION_SERVICE, + FTPTransfer.PROXY_TYPE, + FTPTransfer.PROXY_HOST, + FTPTransfer.PROXY_PORT, + FTPTransfer.HTTP_PROXY_USERNAME, + FTPTransfer.HTTP_PROXY_PASSWORD, + FILE_NOT_FOUND_LOG_LEVEL, + SFTPTransfer.CIPHERS_ALLOWED, + SFTPTransfer.KEY_ALGORITHMS_ALLOWED, + SFTPTransfer.KEY_EXCHANGE_ALGORITHMS_ALLOWED, + SFTPTransfer.MESSAGE_AUTHENTICATION_CODES_ALLOWED + ); + @Override protected List getSupportedPropertyDescriptors() { - final PropertyDescriptor port = new PropertyDescriptor.Builder().fromPropertyDescriptor(UNDEFAULTED_PORT).defaultValue("22").build(); - final PropertyDescriptor disableDirectoryListing = new PropertyDescriptor.Builder() - .fromPropertyDescriptor(SFTPTransfer.DISABLE_DIRECTORY_LISTING) - .description(String.format("Control how '%s' is created when '%s' is '%s' and '%s' is enabled. 
%s", - MOVE_DESTINATION_DIR.getDisplayName(), - COMPLETION_STRATEGY.getDisplayName(), - COMPLETION_MOVE.getDisplayName(), - MOVE_CREATE_DIRECTORY.getDisplayName(), - SFTPTransfer.DISABLE_DIRECTORY_LISTING.getDescription())).build(); - - final List properties = new ArrayList<>(); - properties.add(HOSTNAME); - properties.add(port); - properties.add(USERNAME); - properties.add(SFTPTransfer.PASSWORD); - properties.add(SFTPTransfer.PRIVATE_KEY_PATH); - properties.add(SFTPTransfer.PRIVATE_KEY_PASSPHRASE); - properties.add(REMOTE_FILENAME); - properties.add(COMPLETION_STRATEGY); - properties.add(MOVE_DESTINATION_DIR); - properties.add(MOVE_CREATE_DIRECTORY); - properties.add(disableDirectoryListing); - properties.add(SFTPTransfer.CONNECTION_TIMEOUT); - properties.add(SFTPTransfer.DATA_TIMEOUT); - properties.add(SFTPTransfer.USE_KEEPALIVE_ON_TIMEOUT); - properties.add(SFTPTransfer.HOST_KEY_FILE); - properties.add(SFTPTransfer.STRICT_HOST_KEY_CHECKING); - properties.add(SFTPTransfer.USE_COMPRESSION); - properties.add(SFTPTransfer.PROXY_CONFIGURATION_SERVICE); - properties.add(FTPTransfer.PROXY_TYPE); - properties.add(FTPTransfer.PROXY_HOST); - properties.add(FTPTransfer.PROXY_PORT); - properties.add(FTPTransfer.HTTP_PROXY_USERNAME); - properties.add(FTPTransfer.HTTP_PROXY_PASSWORD); - properties.add(FILE_NOT_FOUND_LOG_LEVEL); - properties.add(SFTPTransfer.CIPHERS_ALLOWED); - properties.add(SFTPTransfer.KEY_ALGORITHMS_ALLOWED); - properties.add(SFTPTransfer.KEY_EXCHANGE_ALGORITHMS_ALLOWED); - properties.add(SFTPTransfer.MESSAGE_AUTHENTICATION_CODES_ALLOWED); - return properties; + return PROPERTIES; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FilterAttribute.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FilterAttribute.java index 42bb074987..31b68fafd0 100644 --- 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FilterAttribute.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FilterAttribute.java @@ -71,7 +71,7 @@ public class FilterAttribute extends AbstractProcessor { .description("All successful FlowFiles are routed to this relationship") .build(); - private final static Set relationships = Set.of(REL_SUCCESS); + private final static Set RELATIONSHIPS = Set.of(REL_SUCCESS); public static final PropertyDescriptor FILTER_MODE = new PropertyDescriptor.Builder() .name("Filter Mode") @@ -113,21 +113,25 @@ public class FilterAttribute extends AbstractProcessor { .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) .build(); - private final static String DELIMITER_VALUE = ","; + private final static List PROPERTIES = List.of( + FILTER_MODE, + MATCHING_STRATEGY, + ATTRIBUTE_ENUMERATION, + ATTRIBUTE_PATTERN + ); - private final static List properties = - List.of(FILTER_MODE, MATCHING_STRATEGY, ATTRIBUTE_ENUMERATION, ATTRIBUTE_PATTERN); + private final static String DELIMITER_VALUE = ","; private volatile Predicate cachedMatchingPredicate; @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FlattenJson.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FlattenJson.java index 261d160b2e..2467ad95b8 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FlattenJson.java +++ 
b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/FlattenJson.java @@ -24,12 +24,12 @@ import com.github.wnameless.json.flattener.PrintMode; import com.github.wnameless.json.unflattener.JsonUnflattener; import org.apache.commons.io.IOUtils; import org.apache.commons.text.StringEscapeUtils; -import org.apache.nifi.annotation.behavior.SideEffectFree; -import org.apache.nifi.annotation.documentation.CapabilityDescription; -import org.apache.nifi.annotation.documentation.Tags; import org.apache.nifi.annotation.behavior.InputRequirement; import org.apache.nifi.annotation.behavior.InputRequirement.Requirement; +import org.apache.nifi.annotation.behavior.SideEffectFree; import org.apache.nifi.annotation.behavior.SupportsBatching; +import org.apache.nifi.annotation.documentation.CapabilityDescription; +import org.apache.nifi.annotation.documentation.Tags; import org.apache.nifi.components.AllowableValue; import org.apache.nifi.components.PropertyDescriptor; import org.apache.nifi.components.ValidationResult; @@ -39,16 +39,12 @@ import org.apache.nifi.flowfile.FlowFile; import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; import java.io.IOException; import java.nio.charset.Charset; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -162,6 +158,15 @@ public class FlattenJson extends AbstractProcessor { .expressionLanguageSupported(ExpressionLanguageScope.NONE) .build(); + private static final List PROPERTIES = List.of( + SEPARATOR, + FLATTEN_MODE, + IGNORE_RESERVED_CHARACTERS, + 
RETURN_TYPE, + CHARACTER_SET, + PRETTY_PRINT + ); + public static final Relationship REL_SUCCESS = new Relationship.Builder() .description("Successfully flattened/unflattened files go to this relationship.") .name("success") @@ -172,35 +177,19 @@ public class FlattenJson extends AbstractProcessor { .name("failure") .build(); - private List properties; - private Set relationships; - - @Override - protected void init(final ProcessorInitializationContext context) { - List props = new ArrayList<>(); - props.add(SEPARATOR); - props.add(FLATTEN_MODE); - props.add(IGNORE_RESERVED_CHARACTERS); - props.add(RETURN_TYPE); - props.add(CHARACTER_SET); - props.add(PRETTY_PRINT); - properties = Collections.unmodifiableList(props); - - Set rels = new HashSet<>(); - rels.add(REL_SUCCESS); - rels.add(REL_FAILURE); - - relationships = Collections.unmodifiableSet(rels); - } + private static final Set RELATIONSHIPS = Set.of( + REL_SUCCESS, + REL_FAILURE + ); @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ForkEnrichment.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ForkEnrichment.java index d7fba6d6f9..e796a40b9a 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ForkEnrichment.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ForkEnrichment.java @@ -34,10 +34,7 @@ import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processors.standard.enrichment.EnrichmentRole; -import 
java.util.Arrays; -import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.UUID; @@ -68,14 +65,14 @@ public class ForkEnrichment extends AbstractProcessor { .description("A clone of the incoming FlowFile will be routed to this relationship, after adding appropriate attributes.") .build(); - private static final Set relationships = Collections.unmodifiableSet(new HashSet<>(Arrays.asList( - REL_ORIGINAL, - REL_ENRICHMENT - ))); + private static final Set RELATIONSHIPS = Set.of( + REL_ORIGINAL, + REL_ENRICHMENT + ); @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ForkRecord.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ForkRecord.java index 6d5bea67c4..7cc30f19d2 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ForkRecord.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ForkRecord.java @@ -66,7 +66,6 @@ import java.io.OutputStream; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; -import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -142,6 +141,13 @@ public class ForkRecord extends AbstractProcessor { .required(true) .build(); + private static final List PROPERTIES = List.of( + RECORD_READER, + RECORD_WRITER, + MODE, + INCLUDE_PARENT_FIELDS + ); + public static final Relationship REL_FORK = new Relationship.Builder() .name("fork") .description("The FlowFiles containing the forked records will be routed to this relationship") @@ -155,23 +161,20 @@ public class ForkRecord extends 
AbstractProcessor { .description("In case a FlowFile generates an error during the fork operation, it will be routed to this relationship") .build(); + private static final Set RELATIONSHIPS = Set.of( + REL_ORIGINAL, + REL_FAILURE, + REL_FORK + ); + @Override protected List getSupportedPropertyDescriptors() { - final List properties = new ArrayList<>(); - properties.add(RECORD_READER); - properties.add(RECORD_WRITER); - properties.add(MODE); - properties.add(INCLUDE_PARENT_FIELDS); - return properties; + return PROPERTIES; } @Override public Set getRelationships() { - final Set relationships = new HashSet<>(); - relationships.add(REL_ORIGINAL); - relationships.add(REL_FAILURE); - relationships.add(REL_FORK); - return relationships; + return RELATIONSHIPS; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateFlowFile.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateFlowFile.java index 7a26369215..f7c7e90937 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateFlowFile.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateFlowFile.java @@ -16,20 +16,6 @@ */ package org.apache.nifi.processors.standard; -import java.io.IOException; -import java.io.OutputStream; -import java.nio.charset.Charset; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Random; -import java.util.Set; -import java.util.concurrent.atomic.AtomicReference; - import org.apache.nifi.annotation.behavior.DynamicProperty; import org.apache.nifi.annotation.behavior.InputRequirement; import 
org.apache.nifi.annotation.behavior.InputRequirement.Requirement; @@ -51,12 +37,23 @@ import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.DataUnit; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.io.OutputStreamCallback; import org.apache.nifi.processor.util.StandardValidators; import org.apache.nifi.scheduling.SchedulingStrategy; +import java.io.IOException; +import java.io.OutputStream; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.Set; +import java.util.concurrent.atomic.AtomicReference; + @SupportsBatching @Tags({"test", "random", "generate", "load"}) @InputRequirement(Requirement.INPUT_FORBIDDEN) @@ -132,35 +129,27 @@ public class GenerateFlowFile extends AbstractProcessor { .addValidator(StandardValidators.NON_EMPTY_VALIDATOR) .build(); + private static final List PROPERTIES = List.of( + FILE_SIZE, + BATCH_SIZE, + DATA_FORMAT, + UNIQUE_FLOWFILES, + CUSTOM_TEXT, + CHARSET, + MIME_TYPE + ); + public static final Relationship SUCCESS = new Relationship.Builder() .name("success") .build(); - private List descriptors; - private Set relationships; + private static final Set RELATIONSHIPS = Set.of(SUCCESS); private static final char[] TEXT_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890!@#$%^&*()-_=+/?.,';:\"?<>\n\t ".toCharArray(); - @Override - protected void init(final ProcessorInitializationContext context) { - final List descriptors = new ArrayList<>(); - descriptors.add(FILE_SIZE); - descriptors.add(BATCH_SIZE); - descriptors.add(DATA_FORMAT); - descriptors.add(UNIQUE_FLOWFILES); - descriptors.add(CUSTOM_TEXT); - descriptors.add(CHARSET); - 
descriptors.add(MIME_TYPE); - this.descriptors = Collections.unmodifiableList(descriptors); - - final Set relationships = new HashSet<>(); - relationships.add(SUCCESS); - this.relationships = Collections.unmodifiableSet(relationships); - } - @Override protected List getSupportedPropertyDescriptors() { - return descriptors; + return PROPERTIES; } @Override @@ -177,7 +166,7 @@ public class GenerateFlowFile extends AbstractProcessor { @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @OnScheduled diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateRecord.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateRecord.java index a8a6cead1c..9049b4413a 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateRecord.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateRecord.java @@ -65,7 +65,6 @@ import java.time.LocalDate; import java.time.ZoneId; import java.time.format.DateTimeFormatter; import java.util.ArrayList; -import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.List; @@ -153,25 +152,26 @@ public class GenerateRecord extends AbstractProcessor { .required(false) .build(); + private static final List PROPERTIES = List.of( + RECORD_WRITER, + NUM_RECORDS, + NULLABLE_FIELDS, + NULL_PERCENTAGE, + SCHEMA_TEXT + ); static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") .description("FlowFiles that are successfully created will be routed to this relationship") .build(); - static final Set RELATIONSHIPS = Collections.singleton(REL_SUCCESS); + static final Set RELATIONSHIPS = Set.of(REL_SUCCESS); private volatile Faker faker = new 
Faker(); @Override protected List getSupportedPropertyDescriptors() { - final List properties = new ArrayList<>(); - properties.add(RECORD_WRITER); - properties.add(NUM_RECORDS); - properties.add(NULLABLE_FIELDS); - properties.add(NULL_PERCENTAGE); - properties.add(SCHEMA_TEXT); - return properties; + return PROPERTIES; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateTableFetch.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateTableFetch.java index 0a796f7b94..56cae799af 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateTableFetch.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GenerateTableFetch.java @@ -60,7 +60,6 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -165,31 +164,34 @@ public class GenerateTableFetch extends AbstractDatabaseFetchProcessor { .addValidator(StandardValidators.NON_EMPTY_VALIDATOR) .build(); + private static final List PROPERTIES = List.of( + DBCP_SERVICE, + DB_TYPE, + TABLE_NAME, + COLUMN_NAMES, + MAX_VALUE_COLUMN_NAMES, + QUERY_TIMEOUT, + PARTITION_SIZE, + COLUMN_FOR_VALUE_PARTITIONING, + WHERE_CLAUSE, + CUSTOM_ORDERBY_COLUMN, + OUTPUT_EMPTY_FLOWFILE_ON_ZERO_RESULTS + ); + public static final Relationship REL_FAILURE = new Relationship.Builder() .name("failure") .description("This relationship is only used when SQL query execution (using an incoming FlowFile) failed. The incoming FlowFile will be penalized and routed to this relationship. 
" + "If no incoming connection(s) are specified, this relationship is unused.") .build(); - public GenerateTableFetch() { - final Set r = new HashSet<>(); - r.add(REL_SUCCESS); - r.add(REL_FAILURE); - relationships = Collections.unmodifiableSet(r); + private static final Set RELATIONSHIPS = Set.of( + REL_SUCCESS, + REL_FAILURE + ); - final List pds = new ArrayList<>(); - pds.add(DBCP_SERVICE); - pds.add(DB_TYPE); - pds.add(TABLE_NAME); - pds.add(COLUMN_NAMES); - pds.add(MAX_VALUE_COLUMN_NAMES); - pds.add(QUERY_TIMEOUT); - pds.add(PARTITION_SIZE); - pds.add(COLUMN_FOR_VALUE_PARTITIONING); - pds.add(WHERE_CLAUSE); - pds.add(CUSTOM_ORDERBY_COLUMN); - pds.add(OUTPUT_EMPTY_FLOWFILE_ON_ZERO_RESULTS); - propDescriptors = Collections.unmodifiableList(pds); + public GenerateTableFetch() { + propDescriptors = PROPERTIES; + relationships = RELATIONSHIPS; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFTP.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFTP.java index af982b3a74..522ce64f6e 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFTP.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFTP.java @@ -16,11 +16,6 @@ */ package org.apache.nifi.processors.standard; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.List; - import org.apache.nifi.annotation.behavior.InputRequirement; import org.apache.nifi.annotation.behavior.InputRequirement.Requirement; import org.apache.nifi.annotation.behavior.WritesAttribute; @@ -32,10 +27,13 @@ import org.apache.nifi.components.PropertyDescriptor; import org.apache.nifi.components.ValidationContext; import 
org.apache.nifi.components.ValidationResult; import org.apache.nifi.processor.ProcessContext; -import org.apache.nifi.processor.ProcessorInitializationContext; +import org.apache.nifi.processor.util.file.transfer.FileTransfer; import org.apache.nifi.processor.util.file.transfer.GetFileTransfer; import org.apache.nifi.processors.standard.util.FTPTransfer; -import org.apache.nifi.processor.util.file.transfer.FileTransfer; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; @InputRequirement(Requirement.INPUT_FORBIDDEN) @Tags({"FTP", "get", "retrieve", "files", "fetch", "remote", "ingest", "source", "input"}) @@ -57,44 +55,39 @@ import org.apache.nifi.processor.util.file.transfer.FileTransfer; @SeeAlso(PutFTP.class) public class GetFTP extends GetFileTransfer { - private List properties; - - @Override - protected void init(final ProcessorInitializationContext context) { - final List properties = new ArrayList<>(); - properties.add(FTPTransfer.HOSTNAME); - properties.add(FTPTransfer.PORT); - properties.add(FTPTransfer.USERNAME); - properties.add(FTPTransfer.PASSWORD); - properties.add(FTPTransfer.CONNECTION_MODE); - properties.add(FTPTransfer.TRANSFER_MODE); - properties.add(FTPTransfer.REMOTE_PATH); - properties.add(FTPTransfer.FILE_FILTER_REGEX); - properties.add(FTPTransfer.PATH_FILTER_REGEX); - properties.add(FTPTransfer.POLLING_INTERVAL); - properties.add(FTPTransfer.RECURSIVE_SEARCH); - properties.add(FTPTransfer.FOLLOW_SYMLINK); - properties.add(FTPTransfer.IGNORE_DOTTED_FILES); - properties.add(FTPTransfer.DELETE_ORIGINAL); - properties.add(FTPTransfer.CONNECTION_TIMEOUT); - properties.add(FTPTransfer.DATA_TIMEOUT); - properties.add(FTPTransfer.MAX_SELECTS); - properties.add(FTPTransfer.REMOTE_POLL_BATCH_SIZE); - properties.add(FTPTransfer.USE_NATURAL_ORDERING); - properties.add(FTPTransfer.PROXY_CONFIGURATION_SERVICE); - properties.add(FTPTransfer.PROXY_TYPE); - properties.add(FTPTransfer.PROXY_HOST); - 
properties.add(FTPTransfer.PROXY_PORT); - properties.add(FTPTransfer.HTTP_PROXY_USERNAME); - properties.add(FTPTransfer.HTTP_PROXY_PASSWORD); - properties.add(FTPTransfer.BUFFER_SIZE); - properties.add(FTPTransfer.UTF8_ENCODING); - this.properties = Collections.unmodifiableList(properties); - } + private static final List PROPERTIES = List.of( + FTPTransfer.HOSTNAME, + FTPTransfer.PORT, + FTPTransfer.USERNAME, + FTPTransfer.PASSWORD, + FTPTransfer.CONNECTION_MODE, + FTPTransfer.TRANSFER_MODE, + FTPTransfer.REMOTE_PATH, + FTPTransfer.FILE_FILTER_REGEX, + FTPTransfer.PATH_FILTER_REGEX, + FTPTransfer.POLLING_INTERVAL, + FTPTransfer.RECURSIVE_SEARCH, + FTPTransfer.FOLLOW_SYMLINK, + FTPTransfer.IGNORE_DOTTED_FILES, + FTPTransfer.DELETE_ORIGINAL, + FTPTransfer.CONNECTION_TIMEOUT, + FTPTransfer.DATA_TIMEOUT, + FTPTransfer.MAX_SELECTS, + FTPTransfer.REMOTE_POLL_BATCH_SIZE, + FTPTransfer.USE_NATURAL_ORDERING, + FTPTransfer.PROXY_CONFIGURATION_SERVICE, + FTPTransfer.PROXY_TYPE, + FTPTransfer.PROXY_HOST, + FTPTransfer.PROXY_PORT, + FTPTransfer.HTTP_PROXY_USERNAME, + FTPTransfer.HTTP_PROXY_PASSWORD, + FTPTransfer.BUFFER_SIZE, + FTPTransfer.UTF8_ENCODING + ); @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFile.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFile.java index 6226238cb1..f59320a341 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFile.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetFile.java @@ -37,7 +37,6 @@ import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.DataUnit; import 
org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; @@ -56,7 +55,6 @@ import java.nio.file.attribute.PosixFilePermissions; import java.time.ZoneId; import java.time.format.DateTimeFormatter; import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -190,6 +188,21 @@ public class GetFile extends AbstractProcessor { .defaultValue("10") .build(); + private static final List PROPERTIES = List.of( + DIRECTORY, + FILE_FILTER, + PATH_FILTER, + BATCH_SIZE, + KEEP_SOURCE_FILE, + RECURSE, + POLLING_INTERVAL, + IGNORE_HIDDEN_FILES, + MIN_AGE, + MAX_AGE, + MIN_SIZE, + MAX_SIZE + ); + public static final String FILE_CREATION_TIME_ATTRIBUTE = "file.creationTime"; public static final String FILE_LAST_MODIFY_TIME_ATTRIBUTE = "file.lastModifiedTime"; public static final String FILE_LAST_ACCESS_TIME_ATTRIBUTE = "file.lastAccessTime"; @@ -200,8 +213,7 @@ public class GetFile extends AbstractProcessor { public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success").description("All files are routed to success").build(); - private List properties; - private Set relationships; + private static final Set RELATIONSHIPS = Set.of(REL_SUCCESS); private final BlockingQueue fileQueue = new LinkedBlockingQueue<>(); private final Set inProcess = new HashSet<>(); // guarded by queueLock @@ -212,36 +224,14 @@ public class GetFile extends AbstractProcessor { private final AtomicLong queueLastUpdated = new AtomicLong(0L); - @Override - protected void init(final ProcessorInitializationContext context) { - final List properties = new ArrayList<>(); - properties.add(DIRECTORY); - properties.add(FILE_FILTER); - 
properties.add(PATH_FILTER); - properties.add(BATCH_SIZE); - properties.add(KEEP_SOURCE_FILE); - properties.add(RECURSE); - properties.add(POLLING_INTERVAL); - properties.add(IGNORE_HIDDEN_FILES); - properties.add(MIN_AGE); - properties.add(MAX_AGE); - properties.add(MIN_SIZE); - properties.add(MAX_SIZE); - this.properties = Collections.unmodifiableList(properties); - - final Set relationships = new HashSet<>(); - relationships.add(REL_SUCCESS); - this.relationships = Collections.unmodifiableSet(relationships); - } - @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @OnScheduled @@ -446,7 +436,7 @@ public class GetFile extends AbstractProcessor { flowFile = session.putAttribute(flowFile, CoreAttributes.PATH.key(), relativePathString); flowFile = session.putAttribute(flowFile, CoreAttributes.ABSOLUTE_PATH.key(), absPathString); Map attributes = getAttributesFromFile(filePath); - if (attributes.size() > 0) { + if (!attributes.isEmpty()) { flowFile = session.putAllAttributes(flowFile, attributes); } diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetSFTP.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetSFTP.java index 7039a8de64..f9073870cf 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetSFTP.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/GetSFTP.java @@ -16,11 +16,6 @@ */ package org.apache.nifi.processors.standard; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.List; - import org.apache.nifi.annotation.behavior.InputRequirement; 
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement; import org.apache.nifi.annotation.behavior.WritesAttribute; @@ -32,12 +27,15 @@ import org.apache.nifi.components.PropertyDescriptor; import org.apache.nifi.components.ValidationContext; import org.apache.nifi.components.ValidationResult; import org.apache.nifi.processor.ProcessContext; -import org.apache.nifi.processor.ProcessorInitializationContext; +import org.apache.nifi.processor.util.file.transfer.FileTransfer; import org.apache.nifi.processor.util.file.transfer.GetFileTransfer; import org.apache.nifi.processors.standard.util.FTPTransfer; -import org.apache.nifi.processor.util.file.transfer.FileTransfer; import org.apache.nifi.processors.standard.util.SFTPTransfer; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + @InputRequirement(Requirement.INPUT_FORBIDDEN) @Tags({"sftp", "get", "retrieve", "files", "fetch", "remote", "ingest", "source", "input"}) @CapabilityDescription("Fetches files from an SFTP Server and creates FlowFiles from them") @@ -56,50 +54,45 @@ import org.apache.nifi.processors.standard.util.SFTPTransfer; @SeeAlso(PutSFTP.class) public class GetSFTP extends GetFileTransfer { - private List properties; - - @Override - protected void init(final ProcessorInitializationContext context) { - final List properties = new ArrayList<>(); - properties.add(SFTPTransfer.HOSTNAME); - properties.add(SFTPTransfer.PORT); - properties.add(SFTPTransfer.USERNAME); - properties.add(SFTPTransfer.PASSWORD); - properties.add(SFTPTransfer.PRIVATE_KEY_PATH); - properties.add(SFTPTransfer.PRIVATE_KEY_PASSPHRASE); - properties.add(SFTPTransfer.REMOTE_PATH); - properties.add(SFTPTransfer.FILE_FILTER_REGEX); - properties.add(SFTPTransfer.PATH_FILTER_REGEX); - properties.add(SFTPTransfer.POLLING_INTERVAL); - properties.add(SFTPTransfer.RECURSIVE_SEARCH); - properties.add(SFTPTransfer.FOLLOW_SYMLINK); - properties.add(SFTPTransfer.IGNORE_DOTTED_FILES); - 
properties.add(SFTPTransfer.DELETE_ORIGINAL); - properties.add(SFTPTransfer.CONNECTION_TIMEOUT); - properties.add(SFTPTransfer.DATA_TIMEOUT); - properties.add(SFTPTransfer.HOST_KEY_FILE); - properties.add(SFTPTransfer.MAX_SELECTS); - properties.add(SFTPTransfer.REMOTE_POLL_BATCH_SIZE); - properties.add(SFTPTransfer.STRICT_HOST_KEY_CHECKING); - properties.add(SFTPTransfer.USE_KEEPALIVE_ON_TIMEOUT); - properties.add(SFTPTransfer.USE_COMPRESSION); - properties.add(SFTPTransfer.USE_NATURAL_ORDERING); - properties.add(SFTPTransfer.PROXY_CONFIGURATION_SERVICE); - properties.add(FTPTransfer.PROXY_TYPE); - properties.add(FTPTransfer.PROXY_HOST); - properties.add(FTPTransfer.PROXY_PORT); - properties.add(FTPTransfer.HTTP_PROXY_USERNAME); - properties.add(FTPTransfer.HTTP_PROXY_PASSWORD); - properties.add(SFTPTransfer.CIPHERS_ALLOWED); - properties.add(SFTPTransfer.KEY_ALGORITHMS_ALLOWED); - properties.add(SFTPTransfer.KEY_EXCHANGE_ALGORITHMS_ALLOWED); - properties.add(SFTPTransfer.MESSAGE_AUTHENTICATION_CODES_ALLOWED); - this.properties = Collections.unmodifiableList(properties); - } + private static final List PROPERTIES = List.of( + SFTPTransfer.HOSTNAME, + SFTPTransfer.PORT, + SFTPTransfer.USERNAME, + SFTPTransfer.PASSWORD, + SFTPTransfer.PRIVATE_KEY_PATH, + SFTPTransfer.PRIVATE_KEY_PASSPHRASE, + SFTPTransfer.REMOTE_PATH, + SFTPTransfer.FILE_FILTER_REGEX, + SFTPTransfer.PATH_FILTER_REGEX, + SFTPTransfer.POLLING_INTERVAL, + SFTPTransfer.RECURSIVE_SEARCH, + SFTPTransfer.FOLLOW_SYMLINK, + SFTPTransfer.IGNORE_DOTTED_FILES, + SFTPTransfer.DELETE_ORIGINAL, + SFTPTransfer.CONNECTION_TIMEOUT, + SFTPTransfer.DATA_TIMEOUT, + SFTPTransfer.HOST_KEY_FILE, + SFTPTransfer.MAX_SELECTS, + SFTPTransfer.REMOTE_POLL_BATCH_SIZE, + SFTPTransfer.STRICT_HOST_KEY_CHECKING, + SFTPTransfer.USE_KEEPALIVE_ON_TIMEOUT, + SFTPTransfer.USE_COMPRESSION, + SFTPTransfer.USE_NATURAL_ORDERING, + SFTPTransfer.PROXY_CONFIGURATION_SERVICE, + FTPTransfer.PROXY_TYPE, + FTPTransfer.PROXY_HOST, + 
FTPTransfer.PROXY_PORT, + FTPTransfer.HTTP_PROXY_USERNAME, + FTPTransfer.HTTP_PROXY_PASSWORD, + SFTPTransfer.CIPHERS_ALLOWED, + SFTPTransfer.KEY_ALGORITHMS_ALLOWED, + SFTPTransfer.KEY_EXCHANGE_ALGORITHMS_ALLOWED, + SFTPTransfer.MESSAGE_AUTHENTICATION_CODES_ALLOWED + ); @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpRequest.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpRequest.java index 227069cec9..8ed96ca9fb 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpRequest.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpRequest.java @@ -16,7 +16,15 @@ */ package org.apache.nifi.processors.standard; +import jakarta.servlet.AsyncContext; +import jakarta.servlet.DispatcherType; +import jakarta.servlet.MultipartConfigElement; +import jakarta.servlet.ServletException; +import jakarta.servlet.http.Cookie; import jakarta.servlet.http.HttpServlet; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; +import jakarta.servlet.http.Part; import org.apache.commons.lang3.StringUtils; import org.apache.nifi.annotation.behavior.InputRequirement; import org.apache.nifi.annotation.behavior.InputRequirement.Requirement; @@ -59,14 +67,6 @@ import org.eclipse.jetty.server.ServerConnector; import org.eclipse.jetty.util.thread.QueuedThreadPool; import javax.net.ssl.SSLContext; -import jakarta.servlet.AsyncContext; -import jakarta.servlet.DispatcherType; -import jakarta.servlet.MultipartConfigElement; -import jakarta.servlet.ServletException; -import 
jakarta.servlet.http.Cookie; -import jakarta.servlet.http.HttpServletRequest; -import jakarta.servlet.http.HttpServletResponse; -import jakarta.servlet.http.Part; import java.io.IOException; import java.io.OutputStream; import java.io.UnsupportedEncodingException; @@ -74,7 +74,6 @@ import java.net.URI; import java.net.URLDecoder; import java.security.Principal; import java.util.ArrayList; -import java.util.Collections; import java.util.Enumeration; import java.util.HashMap; import java.util.HashSet; @@ -90,10 +89,10 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import java.util.regex.Pattern; -import static jakarta.servlet.http.HttpServletResponse.SC_SERVICE_UNAVAILABLE; +import static jakarta.servlet.http.HttpServletResponse.SC_BAD_REQUEST; import static jakarta.servlet.http.HttpServletResponse.SC_METHOD_NOT_ALLOWED; import static jakarta.servlet.http.HttpServletResponse.SC_NOT_FOUND; -import static jakarta.servlet.http.HttpServletResponse.SC_BAD_REQUEST; +import static jakarta.servlet.http.HttpServletResponse.SC_SERVICE_UNAVAILABLE; @InputRequirement(Requirement.INPUT_FORBIDDEN) @Tags({"http", "https", "request", "listen", "ingress", "web service"}) @@ -197,7 +196,7 @@ public class HandleHttpRequest extends AbstractProcessor { .description("HTTP Protocols supported for Application Layer Protocol Negotiation with TLS") .required(true) .allowableValues(HttpProtocolStrategy.class) - .defaultValue(HttpProtocolStrategy.HTTP_1_1.getValue()) + .defaultValue(HttpProtocolStrategy.HTTP_1_1) .dependsOn(SSL_CONTEXT) .build(); public static final PropertyDescriptor URL_CHARACTER_SET = new PropertyDescriptor.Builder() @@ -312,37 +311,36 @@ public class HandleHttpRequest extends AbstractProcessor { .addValidator(StandardValidators.DATA_SIZE_VALIDATOR) .defaultValue("512 KB") .build(); + + private static final List PROPERTIES = List.of( + PORT, + HOSTNAME, + SSL_CONTEXT, + HTTP_PROTOCOL_STRATEGY, + HTTP_CONTEXT_MAP, + 
PATH_REGEX, + URL_CHARACTER_SET, + ALLOW_GET, + ALLOW_POST, + ALLOW_PUT, + ALLOW_DELETE, + ALLOW_HEAD, + ALLOW_OPTIONS, + MAXIMUM_THREADS, + ADDITIONAL_METHODS, + CLIENT_AUTH, + CONTAINER_QUEUE_SIZE, + MULTIPART_REQUEST_MAX_SIZE, + MULTIPART_READ_BUFFER_SIZE, + PARAMETERS_TO_ATTRIBUTES + ); + public static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") .description("All content that is received is routed to the 'success' relationship") .build(); - private static final List propertyDescriptors; - - static { - List descriptors = new ArrayList<>(); - descriptors.add(PORT); - descriptors.add(HOSTNAME); - descriptors.add(SSL_CONTEXT); - descriptors.add(HTTP_PROTOCOL_STRATEGY); - descriptors.add(HTTP_CONTEXT_MAP); - descriptors.add(PATH_REGEX); - descriptors.add(URL_CHARACTER_SET); - descriptors.add(ALLOW_GET); - descriptors.add(ALLOW_POST); - descriptors.add(ALLOW_PUT); - descriptors.add(ALLOW_DELETE); - descriptors.add(ALLOW_HEAD); - descriptors.add(ALLOW_OPTIONS); - descriptors.add(MAXIMUM_THREADS); - descriptors.add(ADDITIONAL_METHODS); - descriptors.add(CLIENT_AUTH); - descriptors.add(CONTAINER_QUEUE_SIZE); - descriptors.add(MULTIPART_REQUEST_MAX_SIZE); - descriptors.add(MULTIPART_READ_BUFFER_SIZE); - descriptors.add(PARAMETERS_TO_ATTRIBUTES); - propertyDescriptors = Collections.unmodifiableList(descriptors); - } + private static final Set RELATIONSHIPS = Set.of(REL_SUCCESS); private volatile Server server; private volatile boolean ready; @@ -354,12 +352,12 @@ public class HandleHttpRequest extends AbstractProcessor { @Override protected List getSupportedPropertyDescriptors() { - return propertyDescriptors; + return PROPERTIES; } @Override public Set getRelationships() { - return Collections.singleton(REL_SUCCESS); + return RELATIONSHIPS; } @OnScheduled @@ -389,7 +387,7 @@ public class HandleHttpRequest extends AbstractProcessor { serverConnectorFactory.setWantClientAuth(wantClientAuth); final SSLContext sslContext = sslService == null 
? null : sslService.createContext(); serverConnectorFactory.setSslContext(sslContext); - final HttpProtocolStrategy httpProtocolStrategy = HttpProtocolStrategy.valueOf(context.getProperty(HTTP_PROTOCOL_STRATEGY).getValue()); + final HttpProtocolStrategy httpProtocolStrategy = context.getProperty(HTTP_PROTOCOL_STRATEGY).asAllowableValue(HttpProtocolStrategy.class); serverConnectorFactory.setApplicationLayerProtocols(httpProtocolStrategy.getApplicationLayerProtocols()); final ServerConnector serverConnector = serverConnectorFactory.getServerConnector(); @@ -606,7 +604,7 @@ public class HandleHttpRequest extends AbstractProcessor { request.setAttribute(ServletContextRequest.MULTIPART_CONFIG_ELEMENT, new MultipartConfigElement(tempDir, requestMaxSize, requestMaxSize, readBufferSize)); List parts = null; try { - parts = Collections.unmodifiableList(new ArrayList<>(request.getParts())); + parts = List.copyOf(request.getParts()); int allPartsCount = parts.size(); final String contextIdentifier = UUID.randomUUID().toString(); for (int i = 0; i < allPartsCount; i++) { diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpResponse.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpResponse.java index ff5e4400d3..ca03f3a804 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpResponse.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/HandleHttpResponse.java @@ -16,16 +16,7 @@ */ package org.apache.nifi.processors.standard; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.TimeUnit; -import java.util.regex.Pattern; - import 
jakarta.servlet.http.HttpServletResponse; - import org.apache.nifi.annotation.behavior.DynamicProperty; import org.apache.nifi.annotation.behavior.InputRequirement; import org.apache.nifi.annotation.behavior.InputRequirement.Requirement; @@ -47,6 +38,12 @@ import org.apache.nifi.processor.util.StandardValidators; import org.apache.nifi.processors.standard.util.HTTPUtils; import org.apache.nifi.util.StopWatch; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.regex.Pattern; + @InputRequirement(Requirement.INPUT_REQUIRED) @Tags({"http", "https", "response", "egress", "web service"}) @CapabilityDescription("Sends an HTTP Response to the Requestor that generated a FlowFile. This Processor is designed to be used in conjunction with " @@ -85,6 +82,12 @@ public class HandleHttpResponse extends AbstractProcessor { .required(false) .build(); + private static final List PROPERTIES = List.of( + STATUS_CODE, + HTTP_CONTEXT_MAP, + ATTRIBUTES_AS_HEADERS_REGEX + ); + public static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") .description("FlowFiles will be routed to this Relationship after the response has been successfully sent to the requestor") @@ -95,21 +98,16 @@ public class HandleHttpResponse extends AbstractProcessor { + "for instance, if the connection times out or if NiFi is restarted before responding to the HTTP Request.") .build(); + private static final Set RELATIONSHIPS = Set.of(REL_SUCCESS, REL_FAILURE); + @Override public final List getSupportedPropertyDescriptors() { - final List properties = new ArrayList<>(); - properties.add(STATUS_CODE); - properties.add(HTTP_CONTEXT_MAP); - properties.add(ATTRIBUTES_AS_HEADERS_REGEX); - return properties; + return PROPERTIES; } @Override public Set getRelationships() { - final Set relationships = new HashSet<>(); - relationships.add(REL_SUCCESS); - relationships.add(REL_FAILURE); - return relationships; + return 
RELATIONSHIPS; } @Override @@ -217,7 +215,7 @@ public class HandleHttpResponse extends AbstractProcessor { } private static boolean isNumber(final String value) { - if (value.length() == 0) { + if (value.isEmpty()) { return false; } diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/IdentifyMimeType.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/IdentifyMimeType.java index c158a32504..aef117d93a 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/IdentifyMimeType.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/IdentifyMimeType.java @@ -38,7 +38,6 @@ import org.apache.nifi.migration.PropertyConfiguration; import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; @@ -60,9 +59,7 @@ import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.nio.charset.Charset; -import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; @@ -134,13 +131,19 @@ public class IdentifyMimeType extends AbstractProcessor { .dependsOn(CONFIG_STRATEGY, REPLACE, MERGE) .build(); + private static final List PROPERTIES = List.of( + USE_FILENAME_IN_DETECTION, + CONFIG_STRATEGY, + MIME_CONFIG_BODY, + MIME_CONFIG_FILE + ); + public static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") 
.description("All FlowFiles are routed to success") .build(); - private Set relationships; - private List properties; + private static final Set RELATIONSHIPS = Set.of(REL_SUCCESS); private final TikaConfig config; private Detector detector; @@ -151,21 +154,6 @@ public class IdentifyMimeType extends AbstractProcessor { this.config = TikaConfig.getDefaultConfig(); } - @Override - protected void init(final ProcessorInitializationContext context) { - - final List properties = new ArrayList<>(); - properties.add(USE_FILENAME_IN_DETECTION); - properties.add(CONFIG_STRATEGY); - properties.add(MIME_CONFIG_BODY); - properties.add(MIME_CONFIG_FILE); - this.properties = Collections.unmodifiableList(properties); - - final Set rels = new HashSet<>(); - rels.add(REL_SUCCESS); - this.relationships = Collections.unmodifiableSet(rels); - } - @Override public void migrateProperties(PropertyConfiguration config) { if (!config.hasProperty(CONFIG_STRATEGY)) { @@ -212,12 +200,12 @@ public class IdentifyMimeType extends AbstractProcessor { @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/InvokeHTTP.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/InvokeHTTP.java index 15a2740ed4..89486b1696 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/InvokeHTTP.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/InvokeHTTP.java @@ -109,7 +109,6 @@ import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import 
java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -177,7 +176,7 @@ public class InvokeHTTP extends AbstractProcessor { private static final Pattern FORM_DATA_NAME_PARAMETER_PATTERN = Pattern.compile("post:form:(?.*)$"); private static final String FORM_DATA_NAME_GROUP = "formDataName"; - private static final Set IGNORED_REQUEST_ATTRIBUTES = Collections.unmodifiableSet(new HashSet<>(Arrays.asList( + private static final Set IGNORED_REQUEST_ATTRIBUTES = Set.of( STATUS_CODE, STATUS_MESSAGE, RESPONSE_BODY, @@ -189,7 +188,7 @@ public class InvokeHTTP extends AbstractProcessor { EXCEPTION_MESSAGE, CoreAttributes.UUID.key(), CoreAttributes.PATH.key() - ))); + ); public static final PropertyDescriptor HTTP_METHOD = new PropertyDescriptor.Builder() .name("HTTP Method") @@ -345,7 +344,7 @@ public class InvokeHTTP extends AbstractProcessor { .name("Request Content-Encoding") .description("HTTP Content-Encoding applied to request body during transmission. 
The receiving server must support the selected encoding to avoid request failures.") .required(true) - .defaultValue(ContentEncodingStrategy.DISABLED.getValue()) + .defaultValue(ContentEncodingStrategy.DISABLED) .allowableValues(ContentEncodingStrategy.class) .dependsOn(HTTP_METHOD, HttpMethod.PATCH.name(), HttpMethod.POST.name(), HttpMethod.PUT.name()) .build(); @@ -656,7 +655,7 @@ public class InvokeHTTP extends AbstractProcessor { } else if (oldValue == null) { // new property newDynamicPropertyNames.add(descriptor.getName()); } - this.dynamicPropertyNames = Collections.unmodifiableSet(newDynamicPropertyNames); + this.dynamicPropertyNames = Set.copyOf(newDynamicPropertyNames); } else { // compile the attributes-to-send filter pattern if (REQUEST_HEADER_ATTRIBUTES_PATTERN.getName().equalsIgnoreCase(descriptor.getName())) { @@ -753,7 +752,7 @@ public class InvokeHTTP extends AbstractProcessor { } if (context.getProperty(HTTP2_DISABLED).asBoolean()) { - okHttpClientBuilder.protocols(Collections.singletonList(Protocol.HTTP_1_1)); + okHttpClientBuilder.protocols(List.of(Protocol.HTTP_1_1)); } okHttpClientBuilder.followRedirects(context.getProperty(RESPONSE_REDIRECTS_ENABLED).asBoolean()); @@ -1033,8 +1032,8 @@ public class InvokeHTTP extends AbstractProcessor { } } - final String contentEncoding = context.getProperty(REQUEST_CONTENT_ENCODING).getValue(); - final ContentEncodingStrategy contentEncodingStrategy = ContentEncodingStrategy.valueOf(contentEncoding); + final ContentEncodingStrategy contentEncodingStrategy = + context.getProperty(REQUEST_CONTENT_ENCODING).asAllowableValue(ContentEncodingStrategy.class); if (ContentEncodingStrategy.GZIP == contentEncodingStrategy) { requestBuilder.addHeader(HttpHeader.CONTENT_ENCODING.getHeader(), ContentEncodingStrategy.GZIP.getValue().toLowerCase()); } @@ -1111,7 +1110,7 @@ public class InvokeHTTP extends AbstractProcessor { } }; - if (propertyDescriptors.size() > 0 || StringUtils.isNotEmpty(formDataName)) { + if 
(!propertyDescriptors.isEmpty() || StringUtils.isNotEmpty(formDataName)) { // we have form data MultipartBody.Builder builder = new Builder().setType(MultipartBody.FORM); boolean useFileName = context.getProperty(REQUEST_FORM_DATA_FILENAME_ENABLED).asBoolean(); @@ -1248,7 +1247,7 @@ public class InvokeHTTP extends AbstractProcessor { sb.append(entry.getKey()); sb.append(": "); if (list.size() == 1) { - sb.append(list.get(0)); + sb.append(list.getFirst()); } else { sb.append(list); } diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/JoinEnrichment.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/JoinEnrichment.java index 44f504da0e..8df227b40b 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/JoinEnrichment.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/JoinEnrichment.java @@ -71,11 +71,8 @@ import org.apache.nifi.util.db.JdbcProperties; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; -import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Queue; @@ -189,18 +186,18 @@ public class JoinEnrichment extends BinFiles { .defaultValue("10000") .build(); - private static final List properties = Collections.unmodifiableList(Arrays.asList( - ORIGINAL_RECORD_READER, - ENRICHMENT_RECORD_READER, - RECORD_WRITER, - JOIN_STRATEGY, - SQL, - DEFAULT_PRECISION, - DEFAULT_SCALE, - INSERTION_RECORD_PATH, - MAX_BIN_COUNT, - TIMEOUT - )); + private static final List PROPERTIES = List.of( + ORIGINAL_RECORD_READER, + ENRICHMENT_RECORD_READER, + RECORD_WRITER, + JOIN_STRATEGY, + SQL, + 
DEFAULT_PRECISION, + DEFAULT_SCALE, + INSERTION_RECORD_PATH, + MAX_BIN_COUNT, + TIMEOUT + ); // Relationships static final Relationship REL_JOINED = new Relationship.Builder() @@ -223,23 +220,23 @@ public class JoinEnrichment extends BinFiles { "relationship.") .build(); - private static final Set relationships = Collections.unmodifiableSet(new HashSet<>(Arrays.asList( - REL_JOINED, - REL_ORIGINAL, - REL_TIMEOUT, - REL_FAILURE - ))); + private static final Set RELATIONSHIPS = Set.of( + REL_JOINED, + REL_ORIGINAL, + REL_TIMEOUT, + REL_FAILURE + ); private final SqlJoinCache sqlJoinCache = new SqlJoinCache(getLogger()); @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @OnStopped @@ -307,10 +304,10 @@ public class JoinEnrichment extends BinFiles { session.transfer(flowFiles, REL_TIMEOUT); - final FlowFile flowFile = flowFiles.get(0); + final FlowFile flowFile = flowFiles.getFirst(); final EnrichmentRole role = getEnrichmentRole(flowFile); final String missingType = (role == null) ? 
"other" : getOtherEnrichmentRole(role).name(); - getLogger().warn("Timed out waiting for the {} FlowFile to match {}; routing to {}", missingType, flowFiles.get(0), REL_TIMEOUT.getName()); + getLogger().warn("Timed out waiting for the {} FlowFile to match {}; routing to {}", missingType, flowFiles.getFirst(), REL_TIMEOUT.getName()); session.commitAsync(); return new BinProcessingResult(true); @@ -413,8 +410,7 @@ public class JoinEnrichment extends BinFiles { try (final InputStream rawIn = session.read(flowFile)) { final Map enrichmentAttributes = flowFile.getAttributes(); final RecordReader reader = recordReaderFactory.createRecordReader(enrichmentAttributes, rawIn, flowFile.getSize(), getLogger()); - final RecordSchema schema = reader.getSchema(); - return schema; + return reader.getSchema(); } } @@ -466,20 +462,12 @@ public class JoinEnrichment extends BinFiles { } private EnrichmentRole getOtherEnrichmentRole(final EnrichmentRole role) { - if (role == null) { - return null; - } - - switch (role) { - case ENRICHMENT: - return EnrichmentRole.ORIGINAL; - case ORIGINAL: - return EnrichmentRole.ENRICHMENT; - case UNKNOWN: - return EnrichmentRole.UNKNOWN; - } - - return null; + return switch (role) { + case ENRICHMENT -> EnrichmentRole.ORIGINAL; + case ORIGINAL -> EnrichmentRole.ENRICHMENT; + case UNKNOWN -> EnrichmentRole.UNKNOWN; + case null -> null; + }; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListDatabaseTables.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListDatabaseTables.java index c4432e8df5..ee2c20b871 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListDatabaseTables.java +++ 
b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListDatabaseTables.java @@ -60,10 +60,8 @@ import java.sql.DatabaseMetaData; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; -import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -148,12 +146,6 @@ public class ListDatabaseTables extends AbstractProcessor { public static final String DB_TABLE_REMARKS = "db.table.remarks"; public static final String DB_TABLE_COUNT = "db.table.count"; - // Relationships - public static final Relationship REL_SUCCESS = new Relationship.Builder() - .name("success") - .description("All FlowFiles that are received are routed to success") - .build(); - // Property descriptors public static final PropertyDescriptor DBCP_SERVICE = new PropertyDescriptor.Builder() .name("list-db-tables-db-connection") @@ -235,39 +227,33 @@ public class ListDatabaseTables extends AbstractProcessor { .identifiesControllerService(RecordSetWriterFactory.class) .build(); + private static final List PROPERTIES = List.of( + DBCP_SERVICE, + CATALOG, + SCHEMA_PATTERN, + TABLE_NAME_PATTERN, + TABLE_TYPES, + INCLUDE_COUNT, + RECORD_WRITER, + REFRESH_INTERVAL + ); - private static final List propertyDescriptors; - private static final Set relationships; + // Relationships + public static final Relationship REL_SUCCESS = new Relationship.Builder() + .name("success") + .description("All FlowFiles that are received are routed to success") + .build(); - /* - * Will ensure that the list of property descriptors is build only once. 
- * Will also create a Set of relationships - */ - static { - final List _propertyDescriptors = new ArrayList<>(); - _propertyDescriptors.add(DBCP_SERVICE); - _propertyDescriptors.add(CATALOG); - _propertyDescriptors.add(SCHEMA_PATTERN); - _propertyDescriptors.add(TABLE_NAME_PATTERN); - _propertyDescriptors.add(TABLE_TYPES); - _propertyDescriptors.add(INCLUDE_COUNT); - _propertyDescriptors.add(RECORD_WRITER); - _propertyDescriptors.add(REFRESH_INTERVAL); - propertyDescriptors = Collections.unmodifiableList(_propertyDescriptors); - - final Set _relationships = new HashSet<>(); - _relationships.add(REL_SUCCESS); - relationships = Collections.unmodifiableSet(_relationships); - } + private static final Set RELATIONSHIPS = Set.of(REL_SUCCESS); @Override protected List getSupportedPropertyDescriptors() { - return propertyDescriptors; + return PROPERTIES; } @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override @@ -437,7 +423,6 @@ public class ListDatabaseTables extends AbstractProcessor { } static class RecordTableListingWriter implements TableListingWriter { - private static final RecordSchema RECORD_SCHEMA; public static final String TABLE_NAME = "tableName"; public static final String TABLE_CATALOG = "catalog"; public static final String TABLE_SCHEMA = "schemaName"; @@ -446,19 +431,15 @@ public class ListDatabaseTables extends AbstractProcessor { public static final String TABLE_REMARKS = "remarks"; public static final String TABLE_ROW_COUNT = "rowCount"; - - static { - final List fields = new ArrayList<>(); - fields.add(new RecordField(TABLE_NAME, RecordFieldType.STRING.getDataType(), false)); - fields.add(new RecordField(TABLE_CATALOG, RecordFieldType.STRING.getDataType())); - fields.add(new RecordField(TABLE_SCHEMA, RecordFieldType.STRING.getDataType())); - fields.add(new RecordField(TABLE_FULLNAME, RecordFieldType.STRING.getDataType(), false)); - fields.add(new RecordField(TABLE_TYPE, 
RecordFieldType.STRING.getDataType(), false)); - fields.add(new RecordField(TABLE_REMARKS, RecordFieldType.STRING.getDataType(), false)); - fields.add(new RecordField(TABLE_ROW_COUNT, RecordFieldType.LONG.getDataType(), false)); - RECORD_SCHEMA = new SimpleRecordSchema(fields); - } - + private static final RecordSchema RECORD_SCHEMA = new SimpleRecordSchema(List.of( + new RecordField(TABLE_NAME, RecordFieldType.STRING.getDataType(), false), + new RecordField(TABLE_CATALOG, RecordFieldType.STRING.getDataType()), + new RecordField(TABLE_SCHEMA, RecordFieldType.STRING.getDataType()), + new RecordField(TABLE_FULLNAME, RecordFieldType.STRING.getDataType(), false), + new RecordField(TABLE_TYPE, RecordFieldType.STRING.getDataType(), false), + new RecordField(TABLE_REMARKS, RecordFieldType.STRING.getDataType(), false), + new RecordField(TABLE_ROW_COUNT, RecordFieldType.LONG.getDataType(), false) + )); private final ProcessSession session; private final RecordSetWriterFactory writerFactory; @@ -496,6 +477,7 @@ public class ListDatabaseTables extends AbstractProcessor { if (writeResult.getRecordCount() == 0) { session.remove(flowFile); } else { + // todo 13590 final Map attributes = new HashMap<>(writeResult.getAttributes()); attributes.put("record.count", String.valueOf(writeResult.getRecordCount())); flowFile = session.putAllAttributes(flowFile, attributes); diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListFTP.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListFTP.java index aebe086e8d..197db10422 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListFTP.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListFTP.java @@ -34,15 +34,14 @@ import 
org.apache.nifi.components.ValidationResult; import org.apache.nifi.components.state.Scope; import org.apache.nifi.context.PropertyContext; import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.util.file.transfer.FileTransfer; import org.apache.nifi.processor.util.file.transfer.ListFileTransfer; import org.apache.nifi.processor.util.list.ListedEntityTracker; import org.apache.nifi.processors.standard.util.FTPTransfer; -import org.apache.nifi.processor.util.file.transfer.FileTransfer; +import org.apache.nifi.scheduling.SchedulingStrategy; -import java.util.ArrayList; import java.util.Collection; import java.util.List; -import org.apache.nifi.scheduling.SchedulingStrategy; @PrimaryNodeOnly @TriggerSerially @@ -71,42 +70,45 @@ import org.apache.nifi.scheduling.SchedulingStrategy; @DefaultSchedule(strategy = SchedulingStrategy.TIMER_DRIVEN, period = "1 min") public class ListFTP extends ListFileTransfer { + private static final PropertyDescriptor PORT = + new PropertyDescriptor.Builder().fromPropertyDescriptor(UNDEFAULTED_PORT).defaultValue("21").build(); + + private static final List PROPERTIES = List.of( + FILE_TRANSFER_LISTING_STRATEGY, + HOSTNAME, + PORT, + USERNAME, + FTPTransfer.PASSWORD, + REMOTE_PATH, + RECORD_WRITER, + DISTRIBUTED_CACHE_SERVICE, + FTPTransfer.RECURSIVE_SEARCH, + FTPTransfer.FOLLOW_SYMLINK, + FTPTransfer.FILE_FILTER_REGEX, + FTPTransfer.PATH_FILTER_REGEX, + FTPTransfer.IGNORE_DOTTED_FILES, + FTPTransfer.REMOTE_POLL_BATCH_SIZE, + FTPTransfer.CONNECTION_TIMEOUT, + FTPTransfer.DATA_TIMEOUT, + FTPTransfer.CONNECTION_MODE, + FTPTransfer.TRANSFER_MODE, + FTPTransfer.PROXY_CONFIGURATION_SERVICE, + FTPTransfer.PROXY_TYPE, + FTPTransfer.PROXY_HOST, + FTPTransfer.PROXY_PORT, + FTPTransfer.HTTP_PROXY_USERNAME, + FTPTransfer.HTTP_PROXY_PASSWORD, + FTPTransfer.BUFFER_SIZE, + TARGET_SYSTEM_TIMESTAMP_PRECISION, + ListedEntityTracker.TRACKING_STATE_CACHE, + ListedEntityTracker.TRACKING_TIME_WINDOW, + 
ListedEntityTracker.INITIAL_LISTING_TARGET, + FTPTransfer.UTF8_ENCODING + ); + @Override protected List getSupportedPropertyDescriptors() { - final PropertyDescriptor port = new PropertyDescriptor.Builder().fromPropertyDescriptor(UNDEFAULTED_PORT).defaultValue("21").build(); - - final List properties = new ArrayList<>(); - properties.add(FILE_TRANSFER_LISTING_STRATEGY); - properties.add(HOSTNAME); - properties.add(port); - properties.add(USERNAME); - properties.add(FTPTransfer.PASSWORD); - properties.add(REMOTE_PATH); - properties.add(RECORD_WRITER); - properties.add(DISTRIBUTED_CACHE_SERVICE); - properties.add(FTPTransfer.RECURSIVE_SEARCH); - properties.add(FTPTransfer.FOLLOW_SYMLINK); - properties.add(FTPTransfer.FILE_FILTER_REGEX); - properties.add(FTPTransfer.PATH_FILTER_REGEX); - properties.add(FTPTransfer.IGNORE_DOTTED_FILES); - properties.add(FTPTransfer.REMOTE_POLL_BATCH_SIZE); - properties.add(FTPTransfer.CONNECTION_TIMEOUT); - properties.add(FTPTransfer.DATA_TIMEOUT); - properties.add(FTPTransfer.CONNECTION_MODE); - properties.add(FTPTransfer.TRANSFER_MODE); - properties.add(FTPTransfer.PROXY_CONFIGURATION_SERVICE); - properties.add(FTPTransfer.PROXY_TYPE); - properties.add(FTPTransfer.PROXY_HOST); - properties.add(FTPTransfer.PROXY_PORT); - properties.add(FTPTransfer.HTTP_PROXY_USERNAME); - properties.add(FTPTransfer.HTTP_PROXY_PASSWORD); - properties.add(FTPTransfer.BUFFER_SIZE); - properties.add(TARGET_SYSTEM_TIMESTAMP_PRECISION); - properties.add(ListedEntityTracker.TRACKING_STATE_CACHE); - properties.add(ListedEntityTracker.TRACKING_TIME_WINDOW); - properties.add(ListedEntityTracker.INITIAL_LISTING_TARGET); - properties.add(FTPTransfer.UTF8_ENCODING); - return properties; + return PROPERTIES; } @Override @@ -130,5 +132,4 @@ public class ListFTP extends ListFileTransfer { protected void customValidate(ValidationContext validationContext, Collection results) { FTPTransfer.validateProxySpec(validationContext, results); } - } diff --git 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListFile.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListFile.java index c43126e7d2..2f461da48e 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListFile.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListFile.java @@ -42,9 +42,9 @@ import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.util.StandardValidators; +import org.apache.nifi.processor.util.file.transfer.FileInfo; import org.apache.nifi.processor.util.list.AbstractListProcessor; import org.apache.nifi.processor.util.list.ListedEntityTracker; -import org.apache.nifi.processor.util.file.transfer.FileInfo; import org.apache.nifi.scheduling.SchedulingStrategy; import org.apache.nifi.serialization.record.RecordSchema; import org.apache.nifi.util.Tuple; @@ -67,12 +67,10 @@ import java.time.Instant; import java.time.ZoneId; import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; -import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.HashMap; -import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedList; @@ -269,9 +267,32 @@ public class ListFile extends AbstractListProcessor { .defaultValue("3 mins") .build(); + private static final List PROPERTIES = List.of( + DIRECTORY, + LISTING_STRATEGY, + RECURSE, + RECORD_WRITER, + DIRECTORY_LOCATION, + FILE_FILTER, + PATH_FILTER, + INCLUDE_FILE_ATTRIBUTES, + MIN_AGE, + MAX_AGE, + MIN_SIZE, + MAX_SIZE, + IGNORE_HIDDEN_FILES, + 
TARGET_SYSTEM_TIMESTAMP_PRECISION, + ListedEntityTracker.TRACKING_STATE_CACHE, + ListedEntityTracker.TRACKING_TIME_WINDOW, + ListedEntityTracker.INITIAL_LISTING_TARGET, + ListedEntityTracker.NODE_IDENTIFIER, + TRACK_PERFORMANCE, + MAX_TRACKED_FILES, + MAX_DISK_OPERATION_TIME, + MAX_LISTING_TIME + ); - private List properties; - private Set relationships; + private static final Set RELATIONSHIPS = Set.of(REL_SUCCESS); private volatile ScheduledExecutorService monitoringThreadPool; private volatile Future monitoringFuture; @@ -292,35 +313,6 @@ public class ListFile extends AbstractListProcessor { @Override protected void init(final ProcessorInitializationContext context) { - final List properties = new ArrayList<>(); - properties.add(DIRECTORY); - properties.add(LISTING_STRATEGY); - properties.add(RECURSE); - properties.add(RECORD_WRITER); - properties.add(DIRECTORY_LOCATION); - properties.add(FILE_FILTER); - properties.add(PATH_FILTER); - properties.add(INCLUDE_FILE_ATTRIBUTES); - properties.add(MIN_AGE); - properties.add(MAX_AGE); - properties.add(MIN_SIZE); - properties.add(MAX_SIZE); - properties.add(IGNORE_HIDDEN_FILES); - properties.add(TARGET_SYSTEM_TIMESTAMP_PRECISION); - properties.add(ListedEntityTracker.TRACKING_STATE_CACHE); - properties.add(ListedEntityTracker.TRACKING_TIME_WINDOW); - properties.add(ListedEntityTracker.INITIAL_LISTING_TARGET); - properties.add(ListedEntityTracker.NODE_IDENTIFIER); - properties.add(TRACK_PERFORMANCE); - properties.add(MAX_TRACKED_FILES); - properties.add(MAX_DISK_OPERATION_TIME); - properties.add(MAX_LISTING_TIME); - this.properties = Collections.unmodifiableList(properties); - - final Set relationships = new HashSet<>(); - relationships.add(REL_SUCCESS); - this.relationships = Collections.unmodifiableSet(relationships); - monitoringThreadPool = Executors.newScheduledThreadPool(1, r -> { final Thread t = Executors.defaultThreadFactory().newThread(r); t.setName("Monitor ListFile Performance [UUID=" + 
context.getIdentifier() + "]"); @@ -332,12 +324,12 @@ public class ListFile extends AbstractListProcessor { @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @OnScheduled @@ -582,7 +574,7 @@ public class ListFile extends AbstractListProcessor { final long start = System.currentTimeMillis(); final List result = new LinkedList<>(); - Files.walkFileTree(basePath, Collections.singleton(FileVisitOption.FOLLOW_LINKS), maxDepth, new FileVisitor() { + Files.walkFileTree(basePath, Set.of(FileVisitOption.FOLLOW_LINKS), maxDepth, new FileVisitor() { @Override public FileVisitResult preVisitDirectory(final Path dir, final BasicFileAttributes attributes) { if (Files.isReadable(dir)) { diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListSFTP.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListSFTP.java index 1d3e498a0c..96bf97775c 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListSFTP.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListSFTP.java @@ -36,21 +36,20 @@ import org.apache.nifi.components.state.Scope; import org.apache.nifi.context.PropertyContext; import org.apache.nifi.processor.DataUnit; import org.apache.nifi.processor.ProcessContext; +import org.apache.nifi.processor.util.file.transfer.FileInfo; +import org.apache.nifi.processor.util.file.transfer.FileTransfer; import org.apache.nifi.processor.util.file.transfer.ListFileTransfer; import org.apache.nifi.processor.util.list.ListedEntityTracker; import org.apache.nifi.processors.standard.util.FTPTransfer; -import 
org.apache.nifi.processor.util.file.transfer.FileInfo; -import org.apache.nifi.processor.util.file.transfer.FileTransfer; import org.apache.nifi.processors.standard.util.SFTPTransfer; +import org.apache.nifi.scheduling.SchedulingStrategy; import java.io.IOException; -import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.concurrent.TimeUnit; import java.util.function.Predicate; import java.util.stream.Collectors; -import org.apache.nifi.scheduling.SchedulingStrategy; @PrimaryNodeOnly @TriggerSerially @@ -80,52 +79,54 @@ import org.apache.nifi.scheduling.SchedulingStrategy; @DefaultSchedule(strategy = SchedulingStrategy.TIMER_DRIVEN, period = "1 min") public class ListSFTP extends ListFileTransfer { + private static final List PROPERTIES = List.of( + FILE_TRANSFER_LISTING_STRATEGY, + SFTPTransfer.HOSTNAME, + SFTPTransfer.PORT, + SFTPTransfer.USERNAME, + SFTPTransfer.PASSWORD, + SFTPTransfer.PRIVATE_KEY_PATH, + SFTPTransfer.PRIVATE_KEY_PASSPHRASE, + REMOTE_PATH, + RECORD_WRITER, + DISTRIBUTED_CACHE_SERVICE, + SFTPTransfer.RECURSIVE_SEARCH, + SFTPTransfer.FOLLOW_SYMLINK, + SFTPTransfer.FILE_FILTER_REGEX, + SFTPTransfer.PATH_FILTER_REGEX, + SFTPTransfer.IGNORE_DOTTED_FILES, + SFTPTransfer.REMOTE_POLL_BATCH_SIZE, + SFTPTransfer.STRICT_HOST_KEY_CHECKING, + SFTPTransfer.HOST_KEY_FILE, + SFTPTransfer.CONNECTION_TIMEOUT, + SFTPTransfer.DATA_TIMEOUT, + SFTPTransfer.USE_KEEPALIVE_ON_TIMEOUT, + TARGET_SYSTEM_TIMESTAMP_PRECISION, + SFTPTransfer.USE_COMPRESSION, + SFTPTransfer.PROXY_CONFIGURATION_SERVICE, + FTPTransfer.PROXY_TYPE, + FTPTransfer.PROXY_HOST, + FTPTransfer.PROXY_PORT, + FTPTransfer.HTTP_PROXY_USERNAME, + FTPTransfer.HTTP_PROXY_PASSWORD, + ListedEntityTracker.TRACKING_STATE_CACHE, + ListedEntityTracker.TRACKING_TIME_WINDOW, + ListedEntityTracker.INITIAL_LISTING_TARGET, + ListFile.MIN_AGE, + ListFile.MAX_AGE, + ListFile.MIN_SIZE, + ListFile.MAX_SIZE, + SFTPTransfer.CIPHERS_ALLOWED, + SFTPTransfer.KEY_ALGORITHMS_ALLOWED, + 
SFTPTransfer.KEY_EXCHANGE_ALGORITHMS_ALLOWED, + SFTPTransfer.MESSAGE_AUTHENTICATION_CODES_ALLOWED + ); + private volatile Predicate fileFilter; @Override protected List getSupportedPropertyDescriptors() { - final List properties = new ArrayList<>(); - properties.add(FILE_TRANSFER_LISTING_STRATEGY); - properties.add(SFTPTransfer.HOSTNAME); - properties.add(SFTPTransfer.PORT); - properties.add(SFTPTransfer.USERNAME); - properties.add(SFTPTransfer.PASSWORD); - properties.add(SFTPTransfer.PRIVATE_KEY_PATH); - properties.add(SFTPTransfer.PRIVATE_KEY_PASSPHRASE); - properties.add(REMOTE_PATH); - properties.add(RECORD_WRITER); - properties.add(DISTRIBUTED_CACHE_SERVICE); - properties.add(SFTPTransfer.RECURSIVE_SEARCH); - properties.add(SFTPTransfer.FOLLOW_SYMLINK); - properties.add(SFTPTransfer.FILE_FILTER_REGEX); - properties.add(SFTPTransfer.PATH_FILTER_REGEX); - properties.add(SFTPTransfer.IGNORE_DOTTED_FILES); - properties.add(SFTPTransfer.REMOTE_POLL_BATCH_SIZE); - properties.add(SFTPTransfer.STRICT_HOST_KEY_CHECKING); - properties.add(SFTPTransfer.HOST_KEY_FILE); - properties.add(SFTPTransfer.CONNECTION_TIMEOUT); - properties.add(SFTPTransfer.DATA_TIMEOUT); - properties.add(SFTPTransfer.USE_KEEPALIVE_ON_TIMEOUT); - properties.add(TARGET_SYSTEM_TIMESTAMP_PRECISION); - properties.add(SFTPTransfer.USE_COMPRESSION); - properties.add(SFTPTransfer.PROXY_CONFIGURATION_SERVICE); - properties.add(FTPTransfer.PROXY_TYPE); - properties.add(FTPTransfer.PROXY_HOST); - properties.add(FTPTransfer.PROXY_PORT); - properties.add(FTPTransfer.HTTP_PROXY_USERNAME); - properties.add(FTPTransfer.HTTP_PROXY_PASSWORD); - properties.add(ListedEntityTracker.TRACKING_STATE_CACHE); - properties.add(ListedEntityTracker.TRACKING_TIME_WINDOW); - properties.add(ListedEntityTracker.INITIAL_LISTING_TARGET); - properties.add(ListFile.MIN_AGE); - properties.add(ListFile.MAX_AGE); - properties.add(ListFile.MIN_SIZE); - properties.add(ListFile.MAX_SIZE); - properties.add(SFTPTransfer.CIPHERS_ALLOWED); - 
properties.add(SFTPTransfer.KEY_ALGORITHMS_ALLOWED); - properties.add(SFTPTransfer.KEY_EXCHANGE_ALGORITHMS_ALLOWED); - properties.add(SFTPTransfer.MESSAGE_AUTHENTICATION_CODES_ALLOWED); - return properties; + return PROPERTIES; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenFTP.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenFTP.java index 22b4c485ab..c0785037e2 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenFTP.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenFTP.java @@ -41,10 +41,7 @@ import org.apache.nifi.ssl.SSLContextService; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.CountDownLatch; @@ -78,11 +75,6 @@ public class ListenFTP extends AbstractSessionFactoryProcessor { .identifiesControllerService(SSLContextService.class) .build(); - public static final Relationship RELATIONSHIP_SUCCESS = new Relationship.Builder() - .name("success") - .description("Relationship for successfully received files.") - .build(); - public static final PropertyDescriptor BIND_ADDRESS = new PropertyDescriptor.Builder() .name("bind-address") .displayName("Bind Address") @@ -125,17 +117,20 @@ public class ListenFTP extends AbstractSessionFactoryProcessor { .sensitive(true) .build(); - private static final List PROPERTIES = Collections.unmodifiableList(Arrays.asList( + private static final List PROPERTIES = List.of( BIND_ADDRESS, PORT, USERNAME, PASSWORD, SSL_CONTEXT_SERVICE - )); + ); - private 
static final Set RELATIONSHIPS = Collections.unmodifiableSet(new HashSet<>(Collections.singletonList( - RELATIONSHIP_SUCCESS - ))); + public static final Relationship RELATIONSHIP_SUCCESS = new Relationship.Builder() + .name("success") + .description("Relationship for successfully received files.") + .build(); + + private static final Set RELATIONSHIPS = Set.of(RELATIONSHIP_SUCCESS); private volatile FtpServer ftpServer; private volatile CountDownLatch sessionFactorySetSignal; diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenHTTP.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenHTTP.java index 6d874b669f..d8ae0d6c96 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenHTTP.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenHTTP.java @@ -16,6 +16,9 @@ */ package org.apache.nifi.processors.standard; +import jakarta.servlet.Servlet; +import jakarta.servlet.http.HttpServletResponse; +import jakarta.ws.rs.Path; import org.apache.nifi.annotation.behavior.InputRequirement; import org.apache.nifi.annotation.behavior.InputRequirement.Requirement; import org.apache.nifi.annotation.documentation.CapabilityDescription; @@ -52,21 +55,17 @@ import org.apache.nifi.ssl.RestrictedSSLContextService; import org.apache.nifi.ssl.SSLContextService; import org.apache.nifi.stream.io.LeakyBucketStreamThrottler; import org.apache.nifi.stream.io.StreamThrottler; -import org.eclipse.jetty.server.Server; -import org.eclipse.jetty.server.ServerConnector; import org.eclipse.jetty.ee10.servlet.ServletContextHandler; import org.eclipse.jetty.ee10.servlet.ServletHolder; +import org.eclipse.jetty.server.Server; +import org.eclipse.jetty.server.ServerConnector; 
import org.eclipse.jetty.util.thread.QueuedThreadPool; import javax.net.ssl.SSLContext; -import jakarta.servlet.Servlet; -import jakarta.servlet.http.HttpServletResponse; -import jakarta.ws.rs.Path; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -119,11 +118,6 @@ public class ListenHTTP extends AbstractSessionFactoryProcessor { } } - public static final Relationship RELATIONSHIP_SUCCESS = new Relationship.Builder() - .name("success") - .description("Relationship for successfully received FlowFiles") - .build(); - public static final PropertyDescriptor BASE_PATH = new PropertyDescriptor.Builder() .name("Base Path") .description("Base path for incoming connections") @@ -275,7 +269,7 @@ public class ListenHTTP extends AbstractSessionFactoryProcessor { .dependsOn(RECORD_READER) .build(); - protected static final List PROPERTIES = Collections.unmodifiableList(Arrays.asList( + protected static final List PROPERTIES = List.of( BASE_PATH, PORT, HEALTH_CHECK_PORT, @@ -293,11 +287,14 @@ public class ListenHTTP extends AbstractSessionFactoryProcessor { MAX_THREAD_POOL_SIZE, RECORD_READER, RECORD_WRITER - )); + ); - private static final Set RELATIONSHIPS = Collections.unmodifiableSet(new HashSet<>(Collections.singletonList( - RELATIONSHIP_SUCCESS - ))); + public static final Relationship RELATIONSHIP_SUCCESS = new Relationship.Builder() + .name("success") + .description("Relationship for successfully received FlowFiles") + .build(); + + private static final Set RELATIONSHIPS = Set.of(RELATIONSHIP_SUCCESS); public static final String CONTEXT_ATTRIBUTE_PROCESSOR = "processor"; public static final String CONTEXT_ATTRIBUTE_LOGGER = "logger"; @@ -533,13 +530,9 @@ public class ListenHTTP extends AbstractSessionFactoryProcessor { } protected Set> getServerClasses() { - final Set> s = new HashSet<>(); // NOTE: Servlets 
added below MUST have a Path annotation // any servlets other than ListenHTTPServlet must have a Path annotation start with / - s.add(ListenHTTPServlet.class); - s.add(ContentAcknowledgmentServlet.class); - s.add(HealthCheckServlet.class); - return s; + return Set.of(ListenHTTPServlet.class, ContentAcknowledgmentServlet.class, HealthCheckServlet.class); } private Set findOldFlowFileIds(final ProcessContext ctx) { diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenSyslog.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenSyslog.java index 2e1231c956..5b8d2d352e 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenSyslog.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenSyslog.java @@ -41,7 +41,6 @@ import org.apache.nifi.flowfile.attributes.CoreAttributes; import org.apache.nifi.processor.DataUnit; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; @@ -60,9 +59,7 @@ import java.net.SocketException; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -196,6 +193,23 @@ public class ListenSyslog extends AbstractSyslogProcessor { .dependsOn(PROTOCOL, TCP_VALUE) .build(); + private static final List PROPERTIES = List.of( + PROTOCOL, + PORT, + NETWORK_INTF_NAME, + 
SOCKET_KEEP_ALIVE, + SSL_CONTEXT_SERVICE, + CLIENT_AUTH, + RECV_BUFFER_SIZE, + MAX_MESSAGE_QUEUE_SIZE, + MAX_SOCKET_BUFFER_SIZE, + MAX_CONNECTIONS, + MAX_BATCH_SIZE, + MESSAGE_DELIMITER, + PARSE_MESSAGES, + CHARSET + ); + public static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") .description("Syslog messages that match one of the expected formats will be sent out this relationship as a FlowFile per message.") @@ -205,51 +219,28 @@ public class ListenSyslog extends AbstractSyslogProcessor { .description("Syslog messages that do not match one of the expected formats will be sent out this relationship as a FlowFile per message.") .build(); + private static final Set RELATIONSHIPS = Set.of( + REL_SUCCESS, + REL_INVALID + ); + protected static final String RECEIVED_COUNTER = "Messages Received"; protected static final String SUCCESS_COUNTER = "FlowFiles Transferred to Success"; private static final String DEFAULT_MIME_TYPE = "text/plain"; - private Set relationships; - private List descriptors; - private volatile EventServer eventServer; private volatile SyslogParser parser; private volatile BlockingQueue syslogEvents = new LinkedBlockingQueue<>(); private volatile byte[] messageDemarcatorBytes; //it is only the array reference that is volatile - not the contents. 
- @Override - protected void init(final ProcessorInitializationContext context) { - final List descriptors = new ArrayList<>(); - descriptors.add(PROTOCOL); - descriptors.add(PORT); - descriptors.add(NETWORK_INTF_NAME); - descriptors.add(SOCKET_KEEP_ALIVE); - descriptors.add(SSL_CONTEXT_SERVICE); - descriptors.add(CLIENT_AUTH); - descriptors.add(RECV_BUFFER_SIZE); - descriptors.add(MAX_MESSAGE_QUEUE_SIZE); - descriptors.add(MAX_SOCKET_BUFFER_SIZE); - descriptors.add(MAX_CONNECTIONS); - descriptors.add(MAX_BATCH_SIZE); - descriptors.add(MESSAGE_DELIMITER); - descriptors.add(PARSE_MESSAGES); - descriptors.add(CHARSET); - this.descriptors = Collections.unmodifiableList(descriptors); - - final Set relationships = new HashSet<>(); - relationships.add(REL_SUCCESS); - relationships.add(REL_INVALID); - this.relationships = Collections.unmodifiableSet(relationships); - } - @Override public Set getRelationships() { - return this.relationships; + return RELATIONSHIPS; } @Override public List getSupportedPropertyDescriptors() { - return descriptors; + return PROPERTIES; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenTCP.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenTCP.java index 0404dfd990..70a5223da9 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenTCP.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenTCP.java @@ -41,7 +41,6 @@ import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.DataUnit; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.ProcessorInitializationContext; import 
org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; @@ -62,9 +61,7 @@ import java.time.Duration; import java.time.Instant; import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -144,17 +141,35 @@ public class ListenTCP extends AbstractProcessor { .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR) .build(); + private static final List PROPERTIES = List.of( + ListenerProperties.NETWORK_INTF_NAME, + ListenerProperties.PORT, + ListenerProperties.RECV_BUFFER_SIZE, + ListenerProperties.MAX_MESSAGE_QUEUE_SIZE, + ListenerProperties.MAX_SOCKET_BUFFER_SIZE, + ListenerProperties.CHARSET, + ListenerProperties.WORKER_THREADS, + ListenerProperties.MAX_BATCH_SIZE, + ListenerProperties.MESSAGE_DELIMITER, + IDLE_CONNECTION_TIMEOUT, + // Deprecated + MAX_RECV_THREAD_POOL_SIZE, + POOL_RECV_BUFFERS, + SSL_CONTEXT_SERVICE, + CLIENT_AUTH + ); + public static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") .description("Messages received successfully will be sent out this relationship.") .build(); + private static final Set RELATIONSHIPS = Set.of(REL_SUCCESS); + private static final long TRACKING_LOG_INTERVAL = 60000; private final AtomicLong nextTrackingLog = new AtomicLong(); private int eventsCapacity; - protected List descriptors; - protected Set relationships; protected volatile int port; protected volatile TrackingLinkedBlockingQueue events; protected volatile BlockingQueue errorEvents; @@ -162,31 +177,6 @@ public class ListenTCP extends AbstractProcessor { protected volatile byte[] messageDemarcatorBytes; protected volatile EventBatcher eventBatcher; - @Override - protected void init(final ProcessorInitializationContext context) { - final List descriptors = new ArrayList<>(); - 
descriptors.add(ListenerProperties.NETWORK_INTF_NAME); - descriptors.add(ListenerProperties.PORT); - descriptors.add(ListenerProperties.RECV_BUFFER_SIZE); - descriptors.add(ListenerProperties.MAX_MESSAGE_QUEUE_SIZE); - descriptors.add(ListenerProperties.MAX_SOCKET_BUFFER_SIZE); - descriptors.add(ListenerProperties.CHARSET); - descriptors.add(ListenerProperties.WORKER_THREADS); - descriptors.add(ListenerProperties.MAX_BATCH_SIZE); - descriptors.add(ListenerProperties.MESSAGE_DELIMITER); - descriptors.add(IDLE_CONNECTION_TIMEOUT); - // Deprecated - descriptors.add(MAX_RECV_THREAD_POOL_SIZE); - descriptors.add(POOL_RECV_BUFFERS); - descriptors.add(SSL_CONTEXT_SERVICE); - descriptors.add(CLIENT_AUTH); - this.descriptors = Collections.unmodifiableList(descriptors); - - final Set relationships = new HashSet<>(); - relationships.add(REL_SUCCESS); - this.relationships = Collections.unmodifiableSet(relationships); - } - @OnScheduled public void onScheduled(ProcessContext context) throws IOException { int workerThreads = context.getProperty(ListenerProperties.WORKER_THREADS).asInteger(); @@ -245,14 +235,14 @@ public class ListenTCP extends AbstractProcessor { FlowFile flowFile = entry.getValue().getFlowFile(); final List events = entry.getValue().getEvents(); - if (flowFile.getSize() == 0L || events.size() == 0) { + if (flowFile.getSize() == 0L || events.isEmpty()) { session.remove(flowFile); getLogger().debug("No data written to FlowFile from batch {}; removing FlowFile", entry.getKey()); continue; } final Map attributes = getAttributes(entry.getValue()); - addClientCertificateAttributes(attributes, events.get(0)); + addClientCertificateAttributes(attributes, events.getFirst()); flowFile = session.putAllAttributes(flowFile, attributes); getLogger().debug("Transferring {} to success", flowFile); @@ -290,7 +280,7 @@ public class ListenTCP extends AbstractProcessor { protected Map getAttributes(final FlowFileEventBatch batch) { final List events = batch.getEvents(); - final 
String sender = events.get(0).getSender(); + final String sender = events.getFirst().getSender(); final Map attributes = new HashMap<>(3); attributes.put("tcp.sender", sender); attributes.put("tcp.port", String.valueOf(port)); @@ -299,19 +289,19 @@ protected String getTransitUri(final FlowFileEventBatch batch) { final List events = batch.getEvents(); - final String sender = events.get(0).getSender(); + final String sender = events.getFirst().getSender(); final String senderHost = sender.startsWith("/") && sender.length() > 1 ? sender.substring(1) : sender; return String.format("tcp://%s:%d", senderHost, port); } @Override public final Set getRelationships() { - return this.relationships; + return RELATIONSHIPS; } @Override public List getSupportedPropertyDescriptors() { - return descriptors; + return PROPERTIES; } private String getMessageDemarcator(final ProcessContext context) { diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenUDP.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenUDP.java index cd34d592a6..8b1ba1165d 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenUDP.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenUDP.java @@ -45,7 +45,6 @@ import java.io.IOException; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.List; @@ -82,15 +81,17 @@ public class ListenUDP extends AbstractListenEventBatchingProcessor ADDITIONAL_PROPERTIES = List.of( + SENDING_HOST, + SENDING_HOST_PORT + ); + public static final String UDP_PORT_ATTR 
= "udp.port"; public static final String UDP_SENDER_ATTR = "udp.sender"; @Override protected List getAdditionalProperties() { - return Arrays.asList( - SENDING_HOST, - SENDING_HOST_PORT - ); + return ADDITIONAL_PROPERTIES; } @Override @@ -132,7 +133,7 @@ public class ListenUDP extends AbstractListenEventBatchingProcessor getAttributes(final FlowFileEventBatch batch) { - final String sender = batch.getEvents().get(0).getSender(); + final String sender = batch.getEvents().getFirst().getSender(); final Map attributes = new HashMap<>(3); attributes.put(UDP_SENDER_ATTR, sender); attributes.put(UDP_PORT_ATTR, String.valueOf(port)); @@ -141,11 +142,10 @@ public class ListenUDP extends AbstractListenEventBatchingProcessor 1 ? sender.substring(1) : sender; - final String transitUri = new StringBuilder().append("udp").append("://").append(senderHost).append(":") + return new StringBuilder().append("udp").append("://").append(senderHost).append(":") .append(port).toString(); - return transitUri; } public static class HostValidator implements Validator { diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenUDPRecord.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenUDPRecord.java index 9f65bbbc70..9528d1540a 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenUDPRecord.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ListenUDPRecord.java @@ -61,7 +61,6 @@ import java.io.OutputStream; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; @@ -142,12 +141,23 @@ public class ListenUDPRecord extends 
AbstractListenEventProcessor .required(true) .build(); + private static final List ADDITIONAL_PROPERTIES = List.of( + POLL_TIMEOUT, + BATCH_SIZE, + RECORD_READER, + RECORD_WRITER, + SENDING_HOST, + SENDING_HOST_PORT + ); + public static final Relationship REL_PARSE_FAILURE = new Relationship.Builder() .name("parse.failure") .description("If a datagram cannot be parsed using the configured Record Reader, the contents of the " + "message will be routed to this Relationship as its own individual FlowFile.") .build(); + private static final List ADDITIONAL_RELATIONSHIPS = List.of(REL_PARSE_FAILURE); + public static final String UDP_PORT_ATTR = "udp.port"; public static final String UDP_SENDER_ATTR = "udp.sender"; public static final String RECORD_COUNT_ATTR = "record.count"; @@ -156,19 +166,12 @@ public class ListenUDPRecord extends AbstractListenEventProcessor @Override protected List getAdditionalProperties() { - return Arrays.asList( - POLL_TIMEOUT, - BATCH_SIZE, - RECORD_READER, - RECORD_WRITER, - SENDING_HOST, - SENDING_HOST_PORT - ); + return ADDITIONAL_PROPERTIES; } @Override protected List getAdditionalRelationships() { - return Arrays.asList(REL_PARSE_FAILURE); + return ADDITIONAL_RELATIONSHIPS; } @Override @@ -255,7 +258,7 @@ public class ListenUDPRecord extends AbstractListenEventProcessor continue; } - if (records.size() == 0) { + if (records.isEmpty()) { handleParseFailure(event, session, null); continue; } @@ -396,9 +399,8 @@ public class ListenUDPRecord extends AbstractListenEventProcessor private String getTransitUri(final String sender) { final String senderHost = sender.startsWith("/") && sender.length() > 1 ? 
sender.substring(1) : sender; - final String transitUri = new StringBuilder().append("udp").append("://").append(senderHost).append(":") + return new StringBuilder().append("udp").append("://").append(senderHost).append(":") .append(port).toString(); - return transitUri; } /** diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/LogAttribute.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/LogAttribute.java index af79cb5262..ed0bae85d6 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/LogAttribute.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/LogAttribute.java @@ -33,7 +33,6 @@ import org.apache.nifi.logging.ComponentLog; import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.io.InputStreamCallback; @@ -43,9 +42,7 @@ import org.eclipse.jetty.util.StringUtil; import java.io.IOException; import java.io.InputStream; import java.nio.charset.Charset; -import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; import java.util.Date; import java.util.HashSet; import java.util.List; @@ -146,6 +143,19 @@ public class LogAttribute extends AbstractProcessor { .required(true) .build(); + private static final List PROPERTIES = List.of( + LOG_LEVEL, + LOG_PAYLOAD, + ATTRIBUTES_TO_LOG_CSV, + ATTRIBUTES_TO_LOG_REGEX, + ATTRIBUTES_TO_IGNORE_CSV, + ATTRIBUTES_TO_IGNORE_REGEX, + LOG_FLOWFILE_PROPERTIES, + OUTPUT_FORMAT, + LOG_PREFIX, + CHARSET + 
); + public static final String FIFTY_DASHES = "--------------------------------------------------"; public static enum DebugLevels { @@ -153,43 +163,22 @@ public class LogAttribute extends AbstractProcessor { } public static final long ONE_MB = 1024 * 1024; - private Set relationships; - private List supportedDescriptors; public static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") .description("All FlowFiles are routed to this relationship") .build(); - @Override - protected void init(final ProcessorInitializationContext context) { - final Set procRels = new HashSet<>(); - procRels.add(REL_SUCCESS); - relationships = Collections.unmodifiableSet(procRels); - - // descriptors - final List supDescriptors = new ArrayList<>(); - supDescriptors.add(LOG_LEVEL); - supDescriptors.add(LOG_PAYLOAD); - supDescriptors.add(ATTRIBUTES_TO_LOG_CSV); - supDescriptors.add(ATTRIBUTES_TO_LOG_REGEX); - supDescriptors.add(ATTRIBUTES_TO_IGNORE_CSV); - supDescriptors.add(ATTRIBUTES_TO_IGNORE_REGEX); - supDescriptors.add(LOG_FLOWFILE_PROPERTIES); - supDescriptors.add(OUTPUT_FORMAT); - supDescriptors.add(LOG_PREFIX); - supDescriptors.add(CHARSET); - supportedDescriptors = Collections.unmodifiableList(supDescriptors); - } + private static final Set RELATIONSHIPS = Set.of(REL_SUCCESS); @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override protected List getSupportedPropertyDescriptors() { - return supportedDescriptors; + return PROPERTIES; } protected String processFlowFile(final ComponentLog logger, final DebugLevels logLevel, final FlowFile flowFile, final ProcessSession session, final ProcessContext context) { @@ -331,24 +320,13 @@ public class LogAttribute extends AbstractProcessor { } final ComponentLog LOG = getLogger(); - boolean isLogLevelEnabled = false; - switch (logLevel) { - case trace: - isLogLevelEnabled = LOG.isTraceEnabled(); - break; - case debug: - isLogLevelEnabled = LOG.isDebugEnabled(); - 
break; - case info: - isLogLevelEnabled = LOG.isInfoEnabled(); - break; - case warn: - isLogLevelEnabled = LOG.isWarnEnabled(); - break; - case error: - isLogLevelEnabled = LOG.isErrorEnabled(); - break; - } + boolean isLogLevelEnabled = switch (logLevel) { + case trace -> LOG.isTraceEnabled(); + case debug -> LOG.isDebugEnabled(); + case info -> LOG.isInfoEnabled(); + case warn -> LOG.isWarnEnabled(); + case error -> LOG.isErrorEnabled(); + }; if (!isLogLevelEnabled) { transferChunk(session); @@ -367,7 +345,7 @@ public class LogAttribute extends AbstractProcessor { protected static class FlowFilePayloadCallback implements InputStreamCallback { private String contents = ""; - private Charset charset; + private final Charset charset; public FlowFilePayloadCallback(Charset charset) { this.charset = charset; @@ -382,6 +360,4 @@ public class LogAttribute extends AbstractProcessor { return contents; } } - - } diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/LogMessage.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/LogMessage.java index 40c02c563b..67d884cd16 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/LogMessage.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/LogMessage.java @@ -30,16 +30,12 @@ import org.apache.nifi.logging.ComponentLog; import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; import 
org.eclipse.jetty.util.StringUtil; -import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; -import java.util.HashSet; import java.util.List; import java.util.Set; @@ -79,43 +75,31 @@ public class LogMessage extends AbstractProcessor { .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) .build(); + private static final List PROPERTIES = List.of( + LOG_LEVEL, + LOG_PREFIX, + LOG_MESSAGE + ); + public static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") .description("All FlowFiles are routed to this relationship") .build(); - private static final int CHUNK_SIZE = 50; + private static final Set RELATIONSHIPS = Set.of(REL_SUCCESS); enum MessageLogLevel { - trace, debug, info, warn, error } - private Set relationships; - private List supportedDescriptors; - - @Override - protected void init(final ProcessorInitializationContext context) { - final Set procRels = new HashSet<>(); - procRels.add(REL_SUCCESS); - relationships = Collections.unmodifiableSet(procRels); - - // descriptors - final List supDescriptors = new ArrayList<>(); - supDescriptors.add(LOG_LEVEL); - supDescriptors.add(LOG_PREFIX); - supDescriptors.add(LOG_MESSAGE); - supportedDescriptors = Collections.unmodifiableList(supDescriptors); - } - @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override protected List getSupportedPropertyDescriptors() { - return supportedDescriptors; + return PROPERTIES; } @Override @@ -136,24 +120,13 @@ public class LogMessage extends AbstractProcessor { } final ComponentLog logger = getLogger(); - boolean isLogLevelEnabled = false; - switch (logLevel) { - case trace: - isLogLevelEnabled = logger.isTraceEnabled(); - break; - case debug: - isLogLevelEnabled = logger.isDebugEnabled(); - break; - case info: - isLogLevelEnabled = logger.isInfoEnabled(); - break; - case warn: - isLogLevelEnabled = logger.isWarnEnabled(); - break; - case error: - 
isLogLevelEnabled = logger.isErrorEnabled(); - break; - } + boolean isLogLevelEnabled = switch (logLevel) { + case trace -> logger.isTraceEnabled(); + case debug -> logger.isDebugEnabled(); + case info -> logger.isInfoEnabled(); + case warn -> logger.isWarnEnabled(); + case error -> logger.isErrorEnabled(); + }; if (isLogLevelEnabled) { processFlowFile(logger, logLevel, flowFile, context); diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/LookupAttribute.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/LookupAttribute.java index b2817ea3fe..292d30e2db 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/LookupAttribute.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/LookupAttribute.java @@ -16,20 +16,7 @@ */ package org.apache.nifi.processors.standard; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.stream.Collectors; - import org.apache.commons.lang3.StringUtils; - import org.apache.nifi.annotation.behavior.DynamicProperty; import org.apache.nifi.annotation.behavior.InputRequirement; import org.apache.nifi.annotation.behavior.InputRequirement.Requirement; @@ -52,11 +39,21 @@ import org.apache.nifi.lookup.StringLookupService; import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.Relationship; import 
org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; + @SideEffectFree @SupportsBatching @InputRequirement(Requirement.INPUT_REQUIRED) @@ -88,6 +85,11 @@ public class LookupAttribute extends AbstractProcessor { .required(true) .build(); + private static final List PROPERTIES = List.of( + LOOKUP_SERVICE, + INCLUDE_EMPTY_VALUES + ); + public static final Relationship REL_MATCHED = new Relationship.Builder() .description("FlowFiles with matching lookups are routed to this relationship") .name("matched") @@ -103,9 +105,11 @@ public class LookupAttribute extends AbstractProcessor { .name("failure") .build(); - private List descriptors; - - private Set relationships; + private static final Set RELATIONSHIPS = Set.of( + REL_MATCHED, + REL_UNMATCHED, + REL_FAILURE + ); private Map dynamicProperties; @@ -114,10 +118,10 @@ public class LookupAttribute extends AbstractProcessor { final List errors = new ArrayList<>(super.customValidate(validationContext)); final Set dynamicProperties = validationContext.getProperties().keySet().stream() - .filter(prop -> prop.isDynamic()) + .filter(PropertyDescriptor::isDynamic) .collect(Collectors.toSet()); - if (dynamicProperties == null || dynamicProperties.size() < 1) { + if (dynamicProperties.isEmpty()) { errors.add(new ValidationResult.Builder() .subject("User-Defined Properties") .valid(false) @@ -139,7 +143,7 @@ public class LookupAttribute extends AbstractProcessor { @Override protected List getSupportedPropertyDescriptors() { - return descriptors; + return PROPERTIES; } @Override @@ -156,21 +160,7 @@ public class LookupAttribute extends AbstractProcessor { @Override public Set getRelationships() { - 
return relationships; - } - - @Override - protected void init(final ProcessorInitializationContext context) { - final List descriptors = new ArrayList(); - descriptors.add(LOOKUP_SERVICE); - descriptors.add(INCLUDE_EMPTY_VALUES); - this.descriptors = Collections.unmodifiableList(descriptors); - - final Set relationships = new HashSet(); - relationships.add(REL_MATCHED); - relationships.add(REL_UNMATCHED); - relationships.add(REL_FAILURE); - this.relationships = Collections.unmodifiableSet(relationships); + return RELATIONSHIPS; } @OnScheduled @@ -184,7 +174,7 @@ public class LookupAttribute extends AbstractProcessor { dynamicProperties.put(descriptor, value); } } - this.dynamicProperties = Collections.unmodifiableMap(dynamicProperties); + this.dynamicProperties = Map.copyOf(dynamicProperties); } @Override @@ -236,7 +226,6 @@ public class LookupAttribute extends AbstractProcessor { logger.error(e.getMessage(), e); session.transfer(flowFile, REL_FAILURE); } - } private boolean putAttribute(final String attributeName, final Optional attributeValue, final Map attributes, final boolean includeEmptyValues, final ComponentLog logger) { @@ -250,5 +239,4 @@ public class LookupAttribute extends AbstractProcessor { } return matched; } - } diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/LookupRecord.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/LookupRecord.java index 370a5a35dd..b03116e220 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/LookupRecord.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/LookupRecord.java @@ -222,6 +222,18 @@ public class LookupRecord extends AbstractProcessor { .required(true) .build(); + private static final List PROPERTIES 
= List.of( + RECORD_READER, + RECORD_WRITER, + LOOKUP_SERVICE, + ROOT_RECORD_PATH, + ROUTING_STRATEGY, + RESULT_CONTENTS, + REPLACEMENT_STRATEGY, + RESULT_RECORD_PATH, + CACHE_SIZE + ); + static final Relationship REL_MATCHED = new Relationship.Builder() .name("matched") .description("All records for which the lookup returns a value will be routed to this relationship") @@ -239,11 +251,14 @@ public class LookupRecord extends AbstractProcessor { .description("If a FlowFile cannot be enriched, the unchanged FlowFile will be routed to this relationship") .build(); - private static final Set MATCHED_COLLECTION = Collections.singleton(REL_MATCHED); - private static final Set UNMATCHED_COLLECTION = Collections.singleton(REL_UNMATCHED); - private static final Set SUCCESS_COLLECTION = Collections.singleton(REL_SUCCESS); + private static final Set MATCHED_COLLECTION = Set.of(REL_MATCHED); + private static final Set UNMATCHED_COLLECTION = Set.of(REL_UNMATCHED); + private static final Set SUCCESS_COLLECTION = Set.of(REL_SUCCESS); - private volatile Set relationships = new HashSet<>(Arrays.asList(REL_SUCCESS, REL_FAILURE)); + private volatile Set relationships = Set.of( + REL_SUCCESS, + REL_FAILURE + ); private volatile boolean routeToMatchedUnmatched = false; @OnScheduled @@ -258,17 +273,7 @@ public class LookupRecord extends AbstractProcessor { @Override protected List getSupportedPropertyDescriptors() { - final List properties = new ArrayList<>(); - properties.add(RECORD_READER); - properties.add(RECORD_WRITER); - properties.add(LOOKUP_SERVICE); - properties.add(ROOT_RECORD_PATH); - properties.add(ROUTING_STRATEGY); - properties.add(RESULT_CONTENTS); - properties.add(REPLACEMENT_STRATEGY); - properties.add(RESULT_RECORD_PATH); - properties.add(CACHE_SIZE); - return properties; + return PROPERTIES; } @Override @@ -292,7 +297,7 @@ public class LookupRecord extends AbstractProcessor { .collect(Collectors.toSet()); if (dynamicPropNames.isEmpty()) { - return 
Collections.singleton(new ValidationResult.Builder() + return Set.of(new ValidationResult.Builder() .subject("User-Defined Properties") .valid(false) .explanation("At least one user-defined property must be specified.") @@ -304,7 +309,7 @@ public class LookupRecord extends AbstractProcessor { if (validationContext.getProperty(REPLACEMENT_STRATEGY).getValue().equals(REPLACE_EXISTING_VALUES.getValue())) { // it must be a single key lookup service if (requiredKeys.size() != 1) { - return Collections.singleton(new ValidationResult.Builder() + return Set.of(new ValidationResult.Builder() .subject(LOOKUP_SERVICE.getDisplayName()) .valid(false) .explanation("When using \"" + REPLACE_EXISTING_VALUES.getDisplayName() + "\" as Record Update Strategy, " @@ -340,18 +345,11 @@ public class LookupRecord extends AbstractProcessor { public void onPropertyModified(final PropertyDescriptor descriptor, final String oldValue, final String newValue) { if (ROUTING_STRATEGY.equals(descriptor)) { if (ROUTE_TO_MATCHED_UNMATCHED.getValue().equalsIgnoreCase(newValue)) { - final Set matchedUnmatchedRels = new HashSet<>(); - matchedUnmatchedRels.add(REL_MATCHED); - matchedUnmatchedRels.add(REL_UNMATCHED); - matchedUnmatchedRels.add(REL_FAILURE); - this.relationships = matchedUnmatchedRels; + this.relationships = Set.of(REL_MATCHED, REL_UNMATCHED, REL_FAILURE); this.routeToMatchedUnmatched = true; } else { - final Set successRels = new HashSet<>(); - successRels.add(REL_SUCCESS); - successRels.add(REL_FAILURE); - this.relationships = successRels; + this.relationships = Set.of(REL_SUCCESS, REL_FAILURE); this.routeToMatchedUnmatched = false; } @@ -674,9 +672,7 @@ public class LookupRecord extends AbstractProcessor { final RecordPathResult resultPathResult = resultPath.evaluate(record); final String resultContentsValue = context.getProperty(RESULT_CONTENTS).getValue(); - if (RESULT_RECORD_FIELDS.getValue().equals(resultContentsValue) && lookupValue instanceof Record) { - final Record lookupRecord 
= (Record) lookupValue; - + if (RESULT_RECORD_FIELDS.getValue().equals(resultContentsValue) && lookupValue instanceof Record lookupRecord) { // User wants to add all fields of the resultant Record to the specified Record Path. // If the destination Record Path returns to us a Record, then we will add all field values of // the Lookup Record to the destination Record. However, if the destination Record Path returns diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java index 4b25ac7904..eef78792c6 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeContent.java @@ -280,7 +280,6 @@ public class MergeContent extends BinFiles { MERGE_FORMAT_AVRO_VALUE, "The Avro contents of all FlowFiles will be concatenated together into a single FlowFile"); - public static final String TAR_PERMISSIONS_ATTRIBUTE = "tar.permissions"; public static final String MERGE_COUNT_ATTRIBUTE = "merge.count"; public static final String MERGE_BIN_AGE_ATTRIBUTE = "merge.bin.age"; @@ -398,46 +397,50 @@ public class MergeContent extends BinFiles { .dependsOn(MERGE_FORMAT, MERGE_FORMAT_TAR) .build(); + private static final List PROPERTIES = List.of( + MERGE_STRATEGY, + MERGE_FORMAT, + AttributeStrategyUtil.ATTRIBUTE_STRATEGY, + CORRELATION_ATTRIBUTE_NAME, + METADATA_STRATEGY, + addBinPackingDependency(MIN_ENTRIES), + addBinPackingDependency(MAX_ENTRIES), + addBinPackingDependency(MIN_SIZE), + addBinPackingDependency(MAX_SIZE), + MAX_BIN_AGE, + MAX_BIN_COUNT, + DELIMITER_STRATEGY, + HEADER, + FOOTER, + DEMARCATOR, + COMPRESSION_LEVEL, + KEEP_PATH, + 
TAR_MODIFIED_TIME + ); + public static final Relationship REL_MERGED = new Relationship.Builder().name("merged").description("The FlowFile containing the merged content").build(); + private static final Set RELATIONSHIPS = Set.of( + REL_ORIGINAL, + REL_FAILURE, + REL_MERGED + ); + public static final Pattern NUMBER_PATTERN = Pattern.compile("\\d+"); + // Convenience method to make creation of property descriptors cleaner + private static PropertyDescriptor addBinPackingDependency(final PropertyDescriptor original) { + return new PropertyDescriptor.Builder().fromPropertyDescriptor(original).dependsOn(MERGE_STRATEGY, MERGE_STRATEGY_BIN_PACK).build(); + } + @Override public Set getRelationships() { - final Set relationships = new HashSet<>(); - relationships.add(REL_ORIGINAL); - relationships.add(REL_FAILURE); - relationships.add(REL_MERGED); - return relationships; + return RELATIONSHIPS; } @Override protected List getSupportedPropertyDescriptors() { - final List descriptors = new ArrayList<>(); - descriptors.add(MERGE_STRATEGY); - descriptors.add(MERGE_FORMAT); - descriptors.add(AttributeStrategyUtil.ATTRIBUTE_STRATEGY); - descriptors.add(CORRELATION_ATTRIBUTE_NAME); - descriptors.add(METADATA_STRATEGY); - descriptors.add(addBinPackingDependency(MIN_ENTRIES)); - descriptors.add(addBinPackingDependency(MAX_ENTRIES)); - descriptors.add(addBinPackingDependency(MIN_SIZE)); - descriptors.add(addBinPackingDependency(MAX_SIZE)); - descriptors.add(MAX_BIN_AGE); - descriptors.add(MAX_BIN_COUNT); - descriptors.add(DELIMITER_STRATEGY); - descriptors.add(HEADER); - descriptors.add(FOOTER); - descriptors.add(DEMARCATOR); - descriptors.add(COMPRESSION_LEVEL); - descriptors.add(KEEP_PATH); - descriptors.add(TAR_MODIFIED_TIME); - return descriptors; - } - - // Convenience method to make creation of property descriptors cleaner - private PropertyDescriptor addBinPackingDependency(final PropertyDescriptor original) { - return new 
PropertyDescriptor.Builder().fromPropertyDescriptor(original).dependsOn(MERGE_STRATEGY, MERGE_STRATEGY_BIN_PACK).build(); + return PROPERTIES; } @Override @@ -501,32 +504,19 @@ public class MergeContent extends BinFiles { protected BinProcessingResult processBin(final Bin bin, final ProcessContext context) throws ProcessException { final BinProcessingResult binProcessingResult = new BinProcessingResult(true); final String mergeFormat = context.getProperty(MERGE_FORMAT).getValue(); - MergeBin merger; - switch (mergeFormat) { - case MERGE_FORMAT_TAR_VALUE: - merger = new TarMerge(); - break; - case MERGE_FORMAT_ZIP_VALUE: - merger = new ZipMerge(context.getProperty(COMPRESSION_LEVEL).asInteger()); - break; - case MERGE_FORMAT_FLOWFILE_STREAM_V3_VALUE: - merger = new FlowFileStreamMerger(new FlowFilePackagerV3(), StandardFlowFileMediaType.VERSION_3.getMediaType()); - break; - case MERGE_FORMAT_FLOWFILE_STREAM_V2_VALUE: - merger = new FlowFileStreamMerger(new FlowFilePackagerV2(), StandardFlowFileMediaType.VERSION_2.getMediaType()); - break; - case MERGE_FORMAT_FLOWFILE_TAR_V1_VALUE: - merger = new FlowFileStreamMerger(new FlowFilePackagerV1(), StandardFlowFileMediaType.VERSION_1.getMediaType()); - break; - case MERGE_FORMAT_CONCAT_VALUE: - merger = new BinaryConcatenationMerge(); - break; - case MERGE_FORMAT_AVRO_VALUE: - merger = new AvroMerge(); - break; - default: - throw new AssertionError(); - } + MergeBin merger = switch (mergeFormat) { + case MERGE_FORMAT_TAR_VALUE -> new TarMerge(); + case MERGE_FORMAT_ZIP_VALUE -> new ZipMerge(context.getProperty(COMPRESSION_LEVEL).asInteger()); + case MERGE_FORMAT_FLOWFILE_STREAM_V3_VALUE -> + new FlowFileStreamMerger(new FlowFilePackagerV3(), StandardFlowFileMediaType.VERSION_3.getMediaType()); + case MERGE_FORMAT_FLOWFILE_STREAM_V2_VALUE -> + new FlowFileStreamMerger(new FlowFilePackagerV2(), StandardFlowFileMediaType.VERSION_2.getMediaType()); + case MERGE_FORMAT_FLOWFILE_TAR_V1_VALUE -> + new FlowFileStreamMerger(new 
FlowFilePackagerV1(), StandardFlowFileMediaType.VERSION_1.getMediaType()); + case MERGE_FORMAT_CONCAT_VALUE -> new BinaryConcatenationMerge(); + case MERGE_FORMAT_AVRO_VALUE -> new AvroMerge(); + default -> throw new AssertionError(); + }; final AttributeStrategy attributeStrategy = AttributeStrategyUtil.strategyFor(context); @@ -546,7 +536,7 @@ public class MergeContent extends BinFiles { return binProcessingResult; } - Collections.sort(contents, new FragmentComparator()); + contents.sort(new FragmentComparator()); } FlowFile bundle = merger.merge(bin, context); @@ -736,8 +726,8 @@ public class MergeContent extends BinFiles { private byte[] getDelimiterFileContent(final ProcessContext context, final List flowFiles, final PropertyDescriptor descriptor) throws IOException { byte[] property = null; - if (flowFiles != null && flowFiles.size() > 0) { - final FlowFile flowFile = flowFiles.get(0); + if (flowFiles != null && !flowFiles.isEmpty()) { + final FlowFile flowFile = flowFiles.getFirst(); if (flowFile != null) { final String value = context.getProperty(descriptor).evaluateAttributeExpressions(flowFile).getValue(); if (value != null) { @@ -750,8 +740,8 @@ public class MergeContent extends BinFiles { private byte[] getDelimiterTextContent(final ProcessContext context, final List flowFiles, final PropertyDescriptor descriptor) { byte[] property = null; - if (flowFiles != null && flowFiles.size() > 0) { - final FlowFile flowFile = flowFiles.get(0); + if (flowFiles != null && !flowFiles.isEmpty()) { + final FlowFile flowFile = flowFiles.getFirst(); if (flowFile != null) { final String value = context.getProperty(descriptor).evaluateAttributeExpressions(flowFile).getValue(); if (value != null) { @@ -784,14 +774,14 @@ public class MergeContent extends BinFiles { path = path.getNameCount() == 1 ? null : path.subpath(1, path.getNameCount()); } - return path == null ? "" : path.toString() + "/"; + return path == null ? 
"" : path + "/"; } private String createFilename(final List flowFiles) { if (flowFiles.size() == 1) { - return flowFiles.get(0).getAttribute(CoreAttributes.FILENAME.key()); + return flowFiles.getFirst().getAttribute(CoreAttributes.FILENAME.key()); } else { - final FlowFile ff = flowFiles.get(0); + final FlowFile ff = flowFiles.getFirst(); final String origFilename = ff.getAttribute(SEGMENT_ORIGINAL_FILENAME); if (origFilename != null) { return origFilename; @@ -870,7 +860,7 @@ public class MergeContent extends BinFiles { private long getMaxEntrySize(final List contents) { final OptionalLong maxSize = contents.stream() .parallel() - .mapToLong(ff -> ff.getSize()) + .mapToLong(FlowFile::getSize) .max(); return maxSize.orElse(0L); } @@ -1029,7 +1019,7 @@ public class MergeContent extends BinFiles { final Map metadata = new TreeMap<>(); final AtomicReference schema = new AtomicReference<>(null); final AtomicReference inputCodec = new AtomicReference<>(null); - final DataFileWriter writer = new DataFileWriter<>(new GenericDatumWriter()); + final DataFileWriter writer = new DataFileWriter<>(new GenericDatumWriter<>()); // we don't pass the parents to the #create method because the parents belong to different sessions FlowFile bundle = session.create(contents); diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeRecord.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeRecord.java index f665ca552f..727f384e6a 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeRecord.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MergeRecord.java @@ -62,7 +62,6 @@ import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import 
java.util.Collection; -import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.Set; @@ -186,7 +185,6 @@ public class MergeRecord extends AbstractSessionFactoryProcessor { + "will be grouped together. All FlowFiles in this group must have the same value for the \"fragment.count\" attribute. The ordering of " + "the Records that are output is not guaranteed."); - public static final PropertyDescriptor RECORD_READER = new PropertyDescriptor.Builder() .name("record-reader") .displayName("Record Reader") @@ -277,7 +275,19 @@ public class MergeRecord extends AbstractSessionFactoryProcessor { .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR) .build(); - + private static final List PROPERTIES = List.of( + RECORD_READER, + RECORD_WRITER, + MERGE_STRATEGY, + CORRELATION_ATTRIBUTE_NAME, + AttributeStrategyUtil.ATTRIBUTE_STRATEGY, + MIN_RECORDS, + MAX_RECORDS, + MIN_SIZE, + MAX_SIZE, + MAX_BIN_AGE, + MAX_BIN_COUNT + ); public static final Relationship REL_MERGED = new Relationship.Builder() .name("merged") @@ -292,37 +302,24 @@ public class MergeRecord extends AbstractSessionFactoryProcessor { .description("If the bundle cannot be created, all FlowFiles that would have been used to created the bundle will be transferred to failure") .build(); - private final AtomicReference binManager = new AtomicReference<>(); + private static final Set RELATIONSHIPS = Set.of( + REL_ORIGINAL, + REL_FAILURE, + REL_MERGED + ); + private final AtomicReference binManager = new AtomicReference<>(); @Override protected List getSupportedPropertyDescriptors() { - final List properties = new ArrayList<>(); - properties.add(RECORD_READER); - properties.add(RECORD_WRITER); - properties.add(MERGE_STRATEGY); - properties.add(CORRELATION_ATTRIBUTE_NAME); - properties.add(AttributeStrategyUtil.ATTRIBUTE_STRATEGY); - properties.add(MIN_RECORDS); - properties.add(MAX_RECORDS); - properties.add(MIN_SIZE); - properties.add(MAX_SIZE); - properties.add(MAX_BIN_AGE); - 
properties.add(MAX_BIN_COUNT); - return properties; + return PROPERTIES; } - @Override public Set getRelationships() { - final Set relationships = new HashSet<>(); - relationships.add(REL_ORIGINAL); - relationships.add(REL_FAILURE); - relationships.add(REL_MERGED); - return relationships; + return RELATIONSHIPS; } - @OnStopped public final void resetState() { final RecordBinManager manager = binManager.get(); @@ -476,7 +473,6 @@ public class MergeRecord extends AbstractSessionFactoryProcessor { } } - protected String getGroupId(final ProcessContext context, final FlowFile flowFile, final RecordSchema schema, final ProcessSession session) { final String mergeStrategy = context.getProperty(MERGE_STRATEGY).getValue(); if (MERGE_STRATEGY_DEFRAGMENT.getValue().equals(mergeStrategy)) { diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ModifyBytes.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ModifyBytes.java index bbbfe4917a..2ea191cca0 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ModifyBytes.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ModifyBytes.java @@ -16,16 +16,6 @@ */ package org.apache.nifi.processors.standard; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import java.util.concurrent.TimeUnit; - import org.apache.nifi.annotation.behavior.InputRequirement; import org.apache.nifi.annotation.behavior.InputRequirement.Requirement; import org.apache.nifi.annotation.behavior.SideEffectFree; @@ -48,6 +38,13 @@ import org.apache.nifi.processor.util.StandardValidators; 
import org.apache.nifi.stream.io.StreamUtils; import org.apache.nifi.util.StopWatch; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.List; +import java.util.Set; +import java.util.concurrent.TimeUnit; + @SideEffectFree @SupportsBatching @Tags({"binary", "discard", "keep"}) @@ -60,7 +57,8 @@ public class ModifyBytes extends AbstractProcessor { .name("success") .description("Processed flowfiles.") .build(); - private final Set relationships; + private static final Set RELATIONSHIPS = Set.of(REL_SUCCESS); + public static final PropertyDescriptor START_OFFSET = new PropertyDescriptor.Builder() .name("Start Offset") .displayName("Start Offset") @@ -87,28 +85,20 @@ public class ModifyBytes extends AbstractProcessor { .allowableValues("true", "false") .defaultValue("false") .build(); - private final List propDescriptors; - - public ModifyBytes() { - HashSet r = new HashSet<>(); - r.add(REL_SUCCESS); - relationships = Collections.unmodifiableSet(r); - - ArrayList pds = new ArrayList<>(); - pds.add(START_OFFSET); - pds.add(END_OFFSET); - pds.add(REMOVE_ALL); - propDescriptors = Collections.unmodifiableList(pds); - } + private static final List PROPERTIES = List.of( + START_OFFSET, + END_OFFSET, + REMOVE_ALL + ); @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override protected List getSupportedPropertyDescriptors() { - return propDescriptors; + return PROPERTIES; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MonitorActivity.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MonitorActivity.java index 3d7ebfef4c..cd3dfe8d3d 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MonitorActivity.java +++ 
b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/MonitorActivity.java @@ -16,20 +16,6 @@ */ package org.apache.nifi.processors.standard; -import static java.util.Collections.singletonMap; - -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicLong; import org.apache.nifi.annotation.behavior.InputRequirement; import org.apache.nifi.annotation.behavior.InputRequirement.Requirement; import org.apache.nifi.annotation.behavior.SideEffectFree; @@ -54,11 +40,22 @@ import org.apache.nifi.logging.ComponentLog; import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicLong; + +import static java.util.Collections.singletonMap; + @SideEffectFree @TriggerSerially @TriggerWhenEmpty @@ -165,6 +162,18 @@ public class MonitorActivity extends AbstractProcessor { .defaultValue(REPORT_NODE_ALL.getValue()) .build(); + private static final List PROPERTIES = List.of( + THRESHOLD, + CONTINUALLY_SEND_MESSAGES, + INACTIVITY_MESSAGE, + ACTIVITY_RESTORED_MESSAGE, + 
WAIT_FOR_ACTIVITY, + RESET_STATE_ON_RESTART, + COPY_ATTRIBUTES, + MONITORING_SCOPE, + REPORTING_NODE + ); + public static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") .description("All incoming FlowFiles are routed to success") @@ -180,8 +189,11 @@ public class MonitorActivity extends AbstractProcessor { + "period of inactivity") .build(); - private List properties; - private Set relationships; + private final static Set RELATIONSHIPS = Set.of( + REL_SUCCESS, + REL_INACTIVE, + REL_ACTIVITY_RESTORED + ); private final AtomicBoolean connectedWhenLastTriggered = new AtomicBoolean(false); private final AtomicLong lastInactiveMessage = new AtomicLong(); @@ -190,35 +202,14 @@ public class MonitorActivity extends AbstractProcessor { private volatile LocalFlowActivityInfo localFlowActivityInfo; - @Override - protected void init(final ProcessorInitializationContext context) { - final List properties = new ArrayList<>(); - properties.add(THRESHOLD); - properties.add(CONTINUALLY_SEND_MESSAGES); - properties.add(INACTIVITY_MESSAGE); - properties.add(ACTIVITY_RESTORED_MESSAGE); - properties.add(WAIT_FOR_ACTIVITY); - properties.add(RESET_STATE_ON_RESTART); - properties.add(COPY_ATTRIBUTES); - properties.add(MONITORING_SCOPE); - properties.add(REPORTING_NODE); - this.properties = Collections.unmodifiableList(properties); - - final Set relationships = new HashSet<>(); - relationships.add(REL_SUCCESS); - relationships.add(REL_INACTIVE); - relationships.add(REL_ACTIVITY_RESTORED); - this.relationships = Collections.unmodifiableSet(relationships); - } - @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @OnScheduled @@ -277,7 +268,7 @@ public class MonitorActivity extends AbstractProcessor { final boolean firstKnownTransfer = !localFlowActivityInfo.hasSuccessfulTransfer(); final boolean 
flowStateMustBecomeActive = !wasActive || firstKnownTransfer; - localFlowActivityInfo.update(flowFiles.get(0)); + localFlowActivityInfo.update(flowFiles.getFirst()); if (isClusterScope && flowStateMustBecomeActive) { localFlowActivityInfo.forceSync(); diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Notify.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Notify.java index 274091cc88..a0e72fe72c 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Notify.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Notify.java @@ -16,15 +16,6 @@ */ package org.apache.nifi.processors.standard; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - import org.apache.commons.lang3.StringUtils; import org.apache.nifi.annotation.behavior.InputRequirement; import org.apache.nifi.annotation.behavior.InputRequirement.Requirement; @@ -47,6 +38,13 @@ import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + @SupportsBatching @Tags({"map", "cache", "notify", "distributed", "signal", "release"}) @InputRequirement(Requirement.INPUT_REQUIRED) @@ -134,6 +132,15 @@ public class Notify extends AbstractProcessor { .expressionLanguageSupported(ExpressionLanguageScope.NONE) .build(); + private static final List PROPERTIES = List.of( + RELEASE_SIGNAL_IDENTIFIER, + 
SIGNAL_COUNTER_NAME, + SIGNAL_COUNTER_DELTA, + SIGNAL_BUFFER_COUNT, + DISTRIBUTED_CACHE_SERVICE, + ATTRIBUTE_CACHE_REGEX + ); + public static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") .description("All FlowFiles where the release signal has been successfully entered in the cache will be routed to this relationship") @@ -144,30 +151,19 @@ public class Notify extends AbstractProcessor { .description("When the cache cannot be reached, or if the Release Signal Identifier evaluates to null or empty, FlowFiles will be routed to this relationship") .build(); - private final Set relationships; - - public Notify() { - final Set rels = new HashSet<>(); - rels.add(REL_SUCCESS); - rels.add(REL_FAILURE); - relationships = Collections.unmodifiableSet(rels); - } + private static final Set RELATIONSHIPS = Set.of( + REL_SUCCESS, + REL_FAILURE + ); @Override protected List getSupportedPropertyDescriptors() { - final List descriptors = new ArrayList<>(); - descriptors.add(RELEASE_SIGNAL_IDENTIFIER); - descriptors.add(SIGNAL_COUNTER_NAME); - descriptors.add(SIGNAL_COUNTER_DELTA); - descriptors.add(SIGNAL_BUFFER_COUNT); - descriptors.add(DISTRIBUTED_CACHE_SERVICE); - descriptors.add(ATTRIBUTE_CACHE_REGEX); - return descriptors; + return PROPERTIES; } @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } private class SignalBuffer { @@ -177,7 +173,7 @@ public class Notify extends AbstractProcessor { final List flowFiles = new ArrayList<>(); int incrementDelta(final String counterName, final int delta) { - int current = deltas.containsKey(counterName) ? deltas.get(counterName) : 0; + int current = deltas.getOrDefault(counterName, 0); // Zero (0) clears count. int updated = delta == 0 ? 
0 : current + delta; deltas.put(counterName, updated); @@ -253,7 +249,6 @@ public class Notify extends AbstractProcessor { if (logger.isDebugEnabled()) { logger.debug("Cached release signal identifier {} counterName {} from FlowFile {}", signalId, counterName, flowFile); } - } signalBuffers.forEach((signalId, signalBuffer) -> { @@ -268,5 +263,4 @@ public class Notify extends AbstractProcessor { } }); } - } diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PackageFlowFile.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PackageFlowFile.java index 7b00924b86..40c4bbca17 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PackageFlowFile.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PackageFlowFile.java @@ -130,6 +130,8 @@ public class PackageFlowFile extends AbstractProcessor { .addValidator(StandardValidators.createLongValidator(1, 10_000, true)) .build(); + private static final List PROPERTIES = List.of(BATCH_SIZE); + static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") .description("The packaged FlowFile is sent to this relationship") @@ -139,15 +141,11 @@ public class PackageFlowFile extends AbstractProcessor { .description("The FlowFiles that were used to create the package are sent to this relationship") .build(); - private static final Set RELATIONSHIPS = Set.of( + private static final Set RELATIONSHIPS = Set.of( REL_SUCCESS, REL_ORIGINAL ); - private static final List PROPERTY_DESCRIPTORS = List.of( - BATCH_SIZE - ); - @Override public Set getRelationships() { return RELATIONSHIPS; @@ -155,7 +153,7 @@ public class PackageFlowFile extends AbstractProcessor { @Override protected List 
getSupportedPropertyDescriptors() { - return PROPERTY_DESCRIPTORS; + return PROPERTIES; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ParseSyslog.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ParseSyslog.java index de99dc0814..b76d448647 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ParseSyslog.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ParseSyslog.java @@ -43,9 +43,7 @@ import org.apache.nifi.syslog.parsers.SyslogParser; import java.io.IOException; import java.io.InputStream; import java.nio.charset.Charset; -import java.util.ArrayList; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -79,6 +77,8 @@ public class ParseSyslog extends AbstractProcessor { .addValidator(StandardValidators.CHARACTER_SET_VALIDATOR) .build(); + private static final List PROPERTIES = List.of(CHARSET); + static final Relationship REL_FAILURE = new Relationship.Builder() .name("failure") .description("Any FlowFile that could not be parsed as a Syslog message will be transferred to this Relationship without any attributes being added") @@ -88,22 +88,21 @@ public class ParseSyslog extends AbstractProcessor { .description("Any FlowFile that is successfully parsed as a Syslog message will be to this Relationship.") .build(); - private SyslogParser parser; + private static final Set RELATIONSHIPS = Set.of( + REL_FAILURE, + REL_SUCCESS + ); + private SyslogParser parser; @Override protected List getSupportedPropertyDescriptors() { - final List properties = new ArrayList<>(1); - properties.add(CHARSET); - return properties; + return PROPERTIES; } @Override public Set 
getRelationships() { - final Set relationships = new HashSet<>(); - relationships.add(REL_FAILURE); - relationships.add(REL_SUCCESS); - return relationships; + return RELATIONSHIPS; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ParseSyslog5424.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ParseSyslog5424.java index 3b3b32b8e0..e0b3fb4c2a 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ParseSyslog5424.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ParseSyslog5424.java @@ -38,17 +38,15 @@ import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; import org.apache.nifi.stream.io.StreamUtils; +import org.apache.nifi.syslog.attributes.SyslogAttributes; +import org.apache.nifi.syslog.events.Syslog5424Event; import org.apache.nifi.syslog.keyproviders.SyslogPrefixedKeyProvider; +import org.apache.nifi.syslog.parsers.StrictSyslog5424Parser; import org.apache.nifi.syslog.utils.NifiStructuredDataPolicy; import org.apache.nifi.syslog.utils.NilHandlingPolicy; -import org.apache.nifi.syslog.parsers.StrictSyslog5424Parser; -import org.apache.nifi.syslog.events.Syslog5424Event; -import org.apache.nifi.syslog.attributes.SyslogAttributes; import java.nio.charset.Charset; -import java.util.ArrayList; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -113,6 +111,12 @@ public class ParseSyslog5424 extends AbstractProcessor { .defaultValue("true") .build(); + private static final List PROPERTIES = List.of( + CHARSET, + NIL_POLICY, + INCLUDE_BODY_IN_ATTRIBUTES + ); + 
static final Relationship REL_FAILURE = new Relationship.Builder() .name("failure") .description("Any FlowFile that could not be parsed as a Syslog message will be transferred to this Relationship without any attributes being added") @@ -122,25 +126,23 @@ public class ParseSyslog5424 extends AbstractProcessor { .description("Any FlowFile that is successfully parsed as a Syslog message will be to this Relationship.") .build(); + private static final Set RELATIONSHIPS = Set.of( + REL_FAILURE, + REL_SUCCESS + ); + private volatile StrictSyslog5424Parser parser; private volatile Charset charset; @Override protected List getSupportedPropertyDescriptors() { - final List properties = new ArrayList<>(2); - properties.add(CHARSET); - properties.add(NIL_POLICY); - properties.add(INCLUDE_BODY_IN_ATTRIBUTES); - return properties; + return PROPERTIES; } @Override public Set getRelationships() { - final Set relationships = new HashSet<>(); - relationships.add(REL_FAILURE); - relationships.add(REL_SUCCESS); - return relationships; + return RELATIONSHIPS; } @OnScheduled diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PartitionRecord.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PartitionRecord.java index d24985e624..63e0042c01 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PartitionRecord.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PartitionRecord.java @@ -55,12 +55,10 @@ import org.apache.nifi.serialization.record.util.DataTypeUtils; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; -import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import 
java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -143,6 +141,11 @@ public class PartitionRecord extends AbstractProcessor { .required(true) .build(); + private static final List PROPERTIES = List.of( + RECORD_READER, + RECORD_WRITER + ); + static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") .description("FlowFiles that are successfully partitioned will be routed to this relationship") @@ -157,33 +160,32 @@ public class PartitionRecord extends AbstractProcessor { + "the unchanged FlowFile will be routed to this relationship") .build(); + private static final Set RELATIONSHIPS = Set.of( + REL_SUCCESS, + REL_FAILURE, + REL_ORIGINAL + ); + @Override protected List getSupportedPropertyDescriptors() { - final List properties = new ArrayList<>(); - properties.add(RECORD_READER); - properties.add(RECORD_WRITER); - return properties; + return PROPERTIES; } @Override public Set getRelationships() { - final Set relationships = new HashSet<>(); - relationships.add(REL_SUCCESS); - relationships.add(REL_FAILURE); - relationships.add(REL_ORIGINAL); - return relationships; + return RELATIONSHIPS; } @Override protected Collection customValidate(final ValidationContext validationContext) { final boolean hasDynamic = validationContext.getProperties().keySet().stream() - .anyMatch(prop -> prop.isDynamic()); + .anyMatch(PropertyDescriptor::isDynamic); if (hasDynamic) { return Collections.emptyList(); } - return Collections.singleton(new ValidationResult.Builder() + return Set.of(new ValidationResult.Builder() .subject("User-defined Properties") .valid(false) .explanation("At least one RecordPath must be added to this processor by adding a user-defined property") @@ -214,10 +216,10 @@ public class PartitionRecord extends AbstractProcessor { final Map recordPaths; try { recordPaths = context.getProperties().keySet().stream() - .filter(prop -> prop.isDynamic()) - .collect(Collectors.toMap( - 
prop -> prop.getName(), - prop -> getRecordPath(context, prop, flowFile))); + .filter(PropertyDescriptor::isDynamic) + .collect(Collectors.toMap( + PropertyDescriptor::getName, + prop -> getRecordPath(context, prop, flowFile))); } catch (final Exception e) { getLogger().error("Failed to compile RecordPath for {}; routing to failure", flowFile, e); session.transfer(flowFile, REL_FAILURE); @@ -324,8 +326,7 @@ public class PartitionRecord extends AbstractProcessor { private RecordPath getRecordPath(final ProcessContext context, final PropertyDescriptor prop, final FlowFile flowFile) { final String pathText = context.getProperty(prop).evaluateAttributeExpressions(flowFile).getValue(); - final RecordPath recordPath = recordPathCache.getCompiled(pathText); - return recordPath; + return recordPathCache.getCompiled(pathText); } /** @@ -365,10 +366,9 @@ public class PartitionRecord extends AbstractProcessor { if (obj == null) { return false; } - if (!(obj instanceof ValueWrapper)) { + if (!(obj instanceof ValueWrapper other)) { return false; } - final ValueWrapper other = (ValueWrapper) obj; if (value == null && other.value == null) { return true; } @@ -401,7 +401,7 @@ public class PartitionRecord extends AbstractProcessor { } // If value is null, don't create an attribute - final Object value = values.get(0).get(); + final Object value = values.getFirst().get(); if (value == null) { continue; } @@ -440,10 +440,9 @@ public class PartitionRecord extends AbstractProcessor { if (obj == null) { return false; } - if (!(obj instanceof RecordValueMap)) { + if (!(obj instanceof RecordValueMap other)) { return false; } - final RecordValueMap other = (RecordValueMap) obj; return values.equals(other.values); } diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutDatabaseRecord.java 
b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutDatabaseRecord.java index b287c68b85..541eca9613 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutDatabaseRecord.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutDatabaseRecord.java @@ -78,7 +78,6 @@ import java.sql.Types; import java.util.ArrayList; import java.util.Base64; import java.util.Collection; -import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.HexFormat; @@ -89,12 +88,11 @@ import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Function; -import java.util.stream.Collectors; import static java.lang.String.format; +import static org.apache.nifi.expression.ExpressionLanguageScope.ENVIRONMENT; import static org.apache.nifi.expression.ExpressionLanguageScope.FLOWFILE_ATTRIBUTES; import static org.apache.nifi.expression.ExpressionLanguageScope.NONE; -import static org.apache.nifi.expression.ExpressionLanguageScope.ENVIRONMENT; @InputRequirement(Requirement.INPUT_REQUIRED) @Tags({"sql", "record", "jdbc", "put", "database", "update", "insert", "delete"}) @@ -156,7 +154,11 @@ public class PutDatabaseRecord extends AbstractProcessor { + "such as an invalid query or an integrity constraint violation") .build(); - protected static Set relationships; + private static final Set RELATIONSHIPS = Set.of( + REL_SUCCESS, + REL_FAILURE, + REL_RETRY + ); // Properties static final PropertyDescriptor RECORD_READER_FACTORY = new Builder() @@ -387,7 +389,7 @@ public class PutDatabaseRecord extends AbstractProcessor { static final PropertyDescriptor DB_TYPE; protected static final Map dbAdapters; - protected static List propDescriptors; + protected static List 
properties; private Cache schemaCache; static { @@ -410,53 +412,45 @@ public class PutDatabaseRecord extends AbstractProcessor { .required(false) .build(); - final Set r = new HashSet<>(); - r.add(REL_SUCCESS); - r.add(REL_FAILURE); - r.add(REL_RETRY); - relationships = Collections.unmodifiableSet(r); - - final List pds = new ArrayList<>(); - pds.add(RECORD_READER_FACTORY); - pds.add(DB_TYPE); - pds.add(STATEMENT_TYPE); - pds.add(STATEMENT_TYPE_RECORD_PATH); - pds.add(DATA_RECORD_PATH); - pds.add(DBCP_SERVICE); - pds.add(CATALOG_NAME); - pds.add(SCHEMA_NAME); - pds.add(TABLE_NAME); - pds.add(BINARY_STRING_FORMAT); - pds.add(TRANSLATE_FIELD_NAMES); - pds.add(UNMATCHED_FIELD_BEHAVIOR); - pds.add(UNMATCHED_COLUMN_BEHAVIOR); - pds.add(UPDATE_KEYS); - pds.add(FIELD_CONTAINING_SQL); - pds.add(ALLOW_MULTIPLE_STATEMENTS); - pds.add(QUOTE_IDENTIFIERS); - pds.add(QUOTE_TABLE_IDENTIFIER); - pds.add(QUERY_TIMEOUT); - pds.add(RollbackOnFailure.ROLLBACK_ON_FAILURE); - pds.add(TABLE_SCHEMA_CACHE_SIZE); - pds.add(MAX_BATCH_SIZE); - pds.add(AUTO_COMMIT); - - propDescriptors = Collections.unmodifiableList(pds); + properties = List.of( + RECORD_READER_FACTORY, + DB_TYPE, + STATEMENT_TYPE, + STATEMENT_TYPE_RECORD_PATH, + DATA_RECORD_PATH, + DBCP_SERVICE, + CATALOG_NAME, + SCHEMA_NAME, + TABLE_NAME, + BINARY_STRING_FORMAT, + TRANSLATE_FIELD_NAMES, + UNMATCHED_FIELD_BEHAVIOR, + UNMATCHED_COLUMN_BEHAVIOR, + UPDATE_KEYS, + FIELD_CONTAINING_SQL, + ALLOW_MULTIPLE_STATEMENTS, + QUOTE_IDENTIFIERS, + QUOTE_TABLE_IDENTIFIER, + QUERY_TIMEOUT, + RollbackOnFailure.ROLLBACK_ON_FAILURE, + TABLE_SCHEMA_CACHE_SIZE, + MAX_BATCH_SIZE, + AUTO_COMMIT + ); } private DatabaseAdapter databaseAdapter; private volatile Function recordPathOperationType; private volatile RecordPath dataRecordPath; - @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override protected List getSupportedPropertyDescriptors() { - return propDescriptors; + return properties; } @Override @@ 
-898,9 +892,7 @@ public class PutDatabaseRecord extends AbstractProcessor { dest[j] = (Byte) src[j]; } currentValue = dest; - } else if (currentValue instanceof String) { - final String stringValue = (String) currentValue; - + } else if (currentValue instanceof String stringValue) { if (BINARY_STRING_FORMAT_BASE64.getValue().equals(binaryStringFormat)) { currentValue = Base64.getDecoder().decode(stringValue); } else if (BINARY_STRING_FORMAT_HEXADECIMAL.getValue().equals(binaryStringFormat)) { @@ -1062,11 +1054,11 @@ public class PutDatabaseRecord extends AbstractProcessor { private List getDataRecords(final Record outerRecord) { if (dataRecordPath == null) { - return Collections.singletonList(outerRecord); + return List.of(outerRecord); } final RecordPathResult result = dataRecordPath.evaluate(outerRecord); - final List fieldValues = result.getSelectedFields().collect(Collectors.toList()); + final List fieldValues = result.getSelectedFields().toList(); if (fieldValues.isEmpty()) { throw new ProcessException("RecordPath " + dataRecordPath.getPath() + " evaluated against Record yielded no results."); } @@ -1692,7 +1684,7 @@ public class PutDatabaseRecord extends AbstractProcessor { @Override public String apply(final Record record) { final RecordPathResult recordPathResult = recordPath.evaluate(record); - final List resultList = recordPathResult.getSelectedFields().distinct().collect(Collectors.toList()); + final List resultList = recordPathResult.getSelectedFields().distinct().toList(); if (resultList.isEmpty()) { throw new ProcessException("Evaluated RecordPath " + recordPath.getPath() + " against Record but got no results"); } @@ -1701,23 +1693,16 @@ public class PutDatabaseRecord extends AbstractProcessor { throw new ProcessException("Evaluated RecordPath " + recordPath.getPath() + " against Record and received multiple distinct results (" + resultList + ")"); } - final String resultValue = String.valueOf(resultList.get(0).getValue()).toUpperCase(); - switch 
(resultValue) { - case INSERT_TYPE: - case UPDATE_TYPE: - case DELETE_TYPE: - case UPSERT_TYPE: - return resultValue; - case "C": - case "R": - return INSERT_TYPE; - case "U": - return UPDATE_TYPE; - case "D": - return DELETE_TYPE; - } + final String resultValue = String.valueOf(resultList.getFirst().getValue()).toUpperCase(); - throw new ProcessException("Evaluated RecordPath " + recordPath.getPath() + " against Record to determine Statement Type but found invalid value: " + resultValue); + return switch (resultValue) { + case INSERT_TYPE, UPDATE_TYPE, DELETE_TYPE, UPSERT_TYPE -> resultValue; + case "C", "R" -> INSERT_TYPE; + case "U" -> UPDATE_TYPE; + case "D" -> DELETE_TYPE; + default -> + throw new ProcessException("Evaluated RecordPath " + recordPath.getPath() + " against Record to determine Statement Type but found invalid value: " + resultValue); + }; } } diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutDistributedMapCache.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutDistributedMapCache.java index aa2423432c..c3ef29e3a3 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutDistributedMapCache.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutDistributedMapCache.java @@ -16,16 +16,6 @@ */ package org.apache.nifi.processors.standard; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.OutputStream; -import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - import org.apache.commons.lang3.StringUtils; import org.apache.nifi.annotation.behavior.InputRequirement; import 
org.apache.nifi.annotation.behavior.InputRequirement.Requirement; @@ -53,6 +43,13 @@ import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.Set; + @SupportsBatching @Tags({"map", "cache", "put", "distributed"}) @InputRequirement(Requirement.INPUT_REQUIRED) @@ -108,6 +105,12 @@ public class PutDistributedMapCache extends AbstractProcessor { .expressionLanguageSupported(ExpressionLanguageScope.NONE) .build(); + private static final List PROPERTIES = List.of( + CACHE_ENTRY_IDENTIFIER, + DISTRIBUTED_CACHE_SERVICE, + CACHE_UPDATE_STRATEGY, + CACHE_ENTRY_MAX_BYTES + ); public static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") @@ -118,37 +121,28 @@ public class PutDistributedMapCache extends AbstractProcessor { .name("failure") .description("Any FlowFile that cannot be inserted into the cache will be routed to this relationship") .build(); - private final Set relationships; + + private static final Set RELATIONSHIPS = Set.of( + REL_SUCCESS, + REL_FAILURE + ); private final Serializer keySerializer = new StringSerializer(); private final Serializer valueSerializer = new CacheValueSerializer(); private final Deserializer valueDeserializer = new CacheValueDeserializer(); - public PutDistributedMapCache() { - final Set rels = new HashSet<>(); - rels.add(REL_SUCCESS); - rels.add(REL_FAILURE); - relationships = Collections.unmodifiableSet(rels); - } - @Override protected List getSupportedPropertyDescriptors() { - final List descriptors = new ArrayList<>(); - descriptors.add(CACHE_ENTRY_IDENTIFIER); - descriptors.add(DISTRIBUTED_CACHE_SERVICE); - descriptors.add(CACHE_UPDATE_STRATEGY); - descriptors.add(CACHE_ENTRY_MAX_BYTES); - return descriptors; + return 
PROPERTIES; } @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException { - FlowFile flowFile = session.get(); if (flowFile == null) { return; diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutEmail.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutEmail.java index 001d362041..42da4020a7 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutEmail.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutEmail.java @@ -56,7 +56,6 @@ import org.apache.nifi.oauth2.OAuth2AccessTokenProvider; import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; @@ -68,10 +67,7 @@ import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import java.util.Date; -import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; @@ -291,6 +287,31 @@ public class PutEmail extends AbstractProcessor { .defaultValue(StandardCharsets.UTF_8.name()) .build(); + private static final List PROPERTIES = List.of( + SMTP_HOSTNAME, + SMTP_PORT, + AUTHORIZATION_MODE, + OAUTH2_ACCESS_TOKEN_PROVIDER, + SMTP_USERNAME, + SMTP_PASSWORD, + SMTP_AUTH, + SMTP_TLS, + 
SMTP_SOCKET_FACTORY, + HEADER_XMAILER, + ATTRIBUTE_NAME_REGEX, + CONTENT_TYPE, + FROM, + TO, + CC, + BCC, + SUBJECT, + MESSAGE, + CONTENT_AS_MESSAGE, + INPUT_CHARACTER_SET, + ATTACH_FILE, + INCLUDE_ALL_ATTRIBUTES + ); + public static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") .description("FlowFiles that are successfully sent will be routed to this relationship") @@ -300,69 +321,33 @@ public class PutEmail extends AbstractProcessor { .description("FlowFiles that fail to send will be routed to this relationship") .build(); - - private List properties; - - private Set relationships; + private static final Set RELATIONSHIPS = Set.of( + REL_SUCCESS, + REL_FAILURE + ); /** * Mapping of the mail properties to the NiFi PropertyDescriptors that will be evaluated at runtime */ - private static final Map propertyToContext = new HashMap<>(); - - static { - propertyToContext.put("mail.smtp.host", SMTP_HOSTNAME); - propertyToContext.put("mail.smtp.port", SMTP_PORT); - propertyToContext.put("mail.smtp.socketFactory.port", SMTP_PORT); - propertyToContext.put("mail.smtp.socketFactory.class", SMTP_SOCKET_FACTORY); - propertyToContext.put("mail.smtp.auth", SMTP_AUTH); - propertyToContext.put("mail.smtp.starttls.enable", SMTP_TLS); - propertyToContext.put("mail.smtp.user", SMTP_USERNAME); - propertyToContext.put("mail.smtp.password", SMTP_PASSWORD); - } - - @Override - protected void init(final ProcessorInitializationContext context) { - final List properties = new ArrayList<>(); - properties.add(SMTP_HOSTNAME); - properties.add(SMTP_PORT); - properties.add(AUTHORIZATION_MODE); - properties.add(OAUTH2_ACCESS_TOKEN_PROVIDER); - properties.add(SMTP_USERNAME); - properties.add(SMTP_PASSWORD); - properties.add(SMTP_AUTH); - properties.add(SMTP_TLS); - properties.add(SMTP_SOCKET_FACTORY); - properties.add(HEADER_XMAILER); - properties.add(ATTRIBUTE_NAME_REGEX); - properties.add(CONTENT_TYPE); - properties.add(FROM); - properties.add(TO); - 
properties.add(CC); - properties.add(BCC); - properties.add(SUBJECT); - properties.add(MESSAGE); - properties.add(CONTENT_AS_MESSAGE); - properties.add(INPUT_CHARACTER_SET); - properties.add(ATTACH_FILE); - properties.add(INCLUDE_ALL_ATTRIBUTES); - - this.properties = Collections.unmodifiableList(properties); - - final Set relationships = new HashSet<>(); - relationships.add(REL_SUCCESS); - relationships.add(REL_FAILURE); - this.relationships = Collections.unmodifiableSet(relationships); - } + private static final Map propertyToContext = Map.of( + "mail.smtp.host", SMTP_HOSTNAME, + "mail.smtp.port", SMTP_PORT, + "mail.smtp.socketFactory.port", SMTP_PORT, + "mail.smtp.socketFactory.class", SMTP_SOCKET_FACTORY, + "mail.smtp.auth", SMTP_AUTH, + "mail.smtp.starttls.enable", SMTP_TLS, + "mail.smtp.user", SMTP_USERNAME, + "mail.smtp.password", SMTP_PASSWORD + ); @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @Override @@ -694,7 +679,7 @@ public class PutEmail extends AbstractProcessor { */ private String getEncoding(final ProcessContext context) { final Charset charset = Charset.forName(context.getProperty(INPUT_CHARACTER_SET).getValue()); - if (Charset.forName("US-ASCII").equals(charset)) { + if (StandardCharsets.US_ASCII.equals(charset)) { return "7bit"; } // Every other charset in StandardCharsets use 8 bits or more. 
Using base64 encoding by default diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFTP.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFTP.java index e627250886..bc92eec97c 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFTP.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFTP.java @@ -16,17 +16,6 @@ */ package org.apache.nifi.processors.standard; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.TreeMap; -import java.util.concurrent.atomic.AtomicReference; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - import org.apache.nifi.annotation.behavior.DynamicProperties; import org.apache.nifi.annotation.behavior.DynamicProperty; import org.apache.nifi.annotation.behavior.InputRequirement; @@ -42,11 +31,20 @@ import org.apache.nifi.components.ValidationResult; import org.apache.nifi.expression.ExpressionLanguageScope; import org.apache.nifi.flowfile.FlowFile; import org.apache.nifi.processor.ProcessContext; -import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.util.StandardValidators; import org.apache.nifi.processor.util.file.transfer.PutFileTransfer; import org.apache.nifi.processors.standard.util.FTPTransfer; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.TreeMap; +import java.util.concurrent.atomic.AtomicReference; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + @SupportsBatching 
@InputRequirement(Requirement.INPUT_REQUIRED) @Tags({"remote", "copy", "egress", "put", "ftp", "archive", "files"}) @@ -71,49 +69,43 @@ public class PutFTP extends PutFileTransfer { private final AtomicReference> preSendDescriptorRef = new AtomicReference<>(); private final AtomicReference> postSendDescriptorRef = new AtomicReference<>(); - private List properties; - // PutFileTransfer.onTrigger() uses FlowFile attributes public static final PropertyDescriptor REMOTE_PATH = new PropertyDescriptor.Builder() .fromPropertyDescriptor(FTPTransfer.REMOTE_PATH) .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES).build(); - @Override - protected void init(final ProcessorInitializationContext context) { - final List properties = new ArrayList<>(); - properties.add(FTPTransfer.HOSTNAME); - properties.add(FTPTransfer.PORT); - properties.add(FTPTransfer.USERNAME); - properties.add(FTPTransfer.PASSWORD); - properties.add(REMOTE_PATH); - properties.add(FTPTransfer.CREATE_DIRECTORY); - properties.add(FTPTransfer.BATCH_SIZE); - properties.add(FTPTransfer.CONNECTION_TIMEOUT); - properties.add(FTPTransfer.DATA_TIMEOUT); - properties.add(FTPTransfer.CONFLICT_RESOLUTION); - properties.add(FTPTransfer.DOT_RENAME); - properties.add(FTPTransfer.TEMP_FILENAME); - properties.add(FTPTransfer.TRANSFER_MODE); - properties.add(FTPTransfer.CONNECTION_MODE); - properties.add(FTPTransfer.REJECT_ZERO_BYTE); - properties.add(FTPTransfer.LAST_MODIFIED_TIME); - properties.add(FTPTransfer.PERMISSIONS); - properties.add(FTPTransfer.USE_COMPRESSION); - properties.add(FTPTransfer.PROXY_CONFIGURATION_SERVICE); - properties.add(FTPTransfer.PROXY_TYPE); - properties.add(FTPTransfer.PROXY_HOST); - properties.add(FTPTransfer.PROXY_PORT); - properties.add(FTPTransfer.HTTP_PROXY_USERNAME); - properties.add(FTPTransfer.HTTP_PROXY_PASSWORD); - properties.add(FTPTransfer.BUFFER_SIZE); - properties.add(FTPTransfer.UTF8_ENCODING); - - this.properties = Collections.unmodifiableList(properties); 
- } + private static final List PROPERTIES = List.of( + FTPTransfer.HOSTNAME, + FTPTransfer.PORT, + FTPTransfer.USERNAME, + FTPTransfer.PASSWORD, + REMOTE_PATH, + FTPTransfer.CREATE_DIRECTORY, + FTPTransfer.BATCH_SIZE, + FTPTransfer.CONNECTION_TIMEOUT, + FTPTransfer.DATA_TIMEOUT, + FTPTransfer.CONFLICT_RESOLUTION, + FTPTransfer.DOT_RENAME, + FTPTransfer.TEMP_FILENAME, + FTPTransfer.TRANSFER_MODE, + FTPTransfer.CONNECTION_MODE, + FTPTransfer.REJECT_ZERO_BYTE, + FTPTransfer.LAST_MODIFIED_TIME, + FTPTransfer.PERMISSIONS, + FTPTransfer.USE_COMPRESSION, + FTPTransfer.PROXY_CONFIGURATION_SERVICE, + FTPTransfer.PROXY_TYPE, + FTPTransfer.PROXY_HOST, + FTPTransfer.PROXY_PORT, + FTPTransfer.HTTP_PROXY_USERNAME, + FTPTransfer.HTTP_PROXY_PASSWORD, + FTPTransfer.BUFFER_SIZE, + FTPTransfer.UTF8_ENCODING + ); @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFile.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFile.java index b2096ba653..99d47e5f1f 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFile.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutFile.java @@ -37,7 +37,6 @@ import org.apache.nifi.logging.ComponentLog; import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; @@ -52,9 +51,6 @@ import 
java.nio.file.attribute.UserPrincipalLookupService; import java.time.OffsetDateTime; import java.time.format.DateTimeFormatter; import java.util.Arrays; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.TimeUnit; @@ -167,6 +163,17 @@ public class PutFile extends AbstractProcessor { .defaultValue("true") .build(); + private static final List PROPERTIES = List.of( + DIRECTORY, + CONFLICT_RESOLUTION, + CREATE_DIRS, + MAX_DESTINATION_FILES, + CHANGE_LAST_MODIFIED_TIME, + CHANGE_PERMISSIONS, + CHANGE_OWNER, + CHANGE_GROUP + ); + public static final int MAX_FILE_LOCK_ATTEMPTS = 10; public static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") @@ -177,38 +184,19 @@ public class PutFile extends AbstractProcessor { .description("Files that could not be written to the output directory for some reason are transferred to this relationship") .build(); - private List properties; - private Set relationships; - - @Override - protected void init(final ProcessorInitializationContext context) { - // relationships - final Set procRels = new HashSet<>(); - procRels.add(REL_SUCCESS); - procRels.add(REL_FAILURE); - relationships = Collections.unmodifiableSet(procRels); - - // descriptors - final List supDescriptors = new ArrayList<>(); - supDescriptors.add(DIRECTORY); - supDescriptors.add(CONFLICT_RESOLUTION); - supDescriptors.add(CREATE_DIRS); - supDescriptors.add(MAX_DESTINATION_FILES); - supDescriptors.add(CHANGE_LAST_MODIFIED_TIME); - supDescriptors.add(CHANGE_PERMISSIONS); - supDescriptors.add(CHANGE_OWNER); - supDescriptors.add(CHANGE_GROUP); - properties = Collections.unmodifiableList(supDescriptors); - } + private static final Set RELATIONSHIPS = Set.of( + REL_SUCCESS, + REL_FAILURE + ); @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override protected List getSupportedPropertyDescriptors() { - 
return properties; + return PROPERTIES; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutRecord.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutRecord.java index f0c1eca797..911013810d 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutRecord.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutRecord.java @@ -40,10 +40,7 @@ import org.apache.nifi.util.StringUtils; import java.io.IOException; import java.io.InputStream; -import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.TimeUnit; @@ -80,6 +77,12 @@ public class PutRecord extends AbstractProcessor { .required(true) .build(); + private static final List PROPERTIES = List.of( + RECORD_READER, + RECORD_SINK, + INCLUDE_ZERO_RECORD_RESULTS + ); + // Relationships static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") @@ -95,33 +98,22 @@ public class PutRecord extends AbstractProcessor { .description("A FlowFile is routed to this relationship if the records could not be transmitted and retrying the operation will also fail") .build(); - private static final List properties; - private static final Set relationships; + private static final Set RELATIONSHIPS = Set.of( + REL_SUCCESS, + REL_FAILURE, + REL_RETRY + ); private volatile RecordSinkService recordSinkService; - static { - final List props = new ArrayList<>(); - props.add(RECORD_READER); - props.add(RECORD_SINK); - props.add(INCLUDE_ZERO_RECORD_RESULTS); - properties = Collections.unmodifiableList(props); - - final Set r = new HashSet<>(); - r.add(REL_SUCCESS); - 
r.add(REL_FAILURE); - r.add(REL_RETRY); - relationships = Collections.unmodifiableSet(r); - } - @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @OnScheduled diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSFTP.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSFTP.java index ef2d004ef3..769c89cdc4 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSFTP.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSFTP.java @@ -16,11 +16,6 @@ */ package org.apache.nifi.processors.standard; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.List; - import org.apache.nifi.annotation.behavior.InputRequirement; import org.apache.nifi.annotation.behavior.InputRequirement.Requirement; import org.apache.nifi.annotation.behavior.SupportsBatching; @@ -31,12 +26,15 @@ import org.apache.nifi.components.PropertyDescriptor; import org.apache.nifi.components.ValidationContext; import org.apache.nifi.components.ValidationResult; import org.apache.nifi.processor.ProcessContext; -import org.apache.nifi.processor.ProcessorInitializationContext; +import org.apache.nifi.processor.util.file.transfer.FileTransfer; import org.apache.nifi.processor.util.file.transfer.PutFileTransfer; import org.apache.nifi.processors.standard.util.FTPTransfer; -import org.apache.nifi.processor.util.file.transfer.FileTransfer; import org.apache.nifi.processors.standard.util.SFTPTransfer; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + @SupportsBatching 
@InputRequirement(Requirement.INPUT_REQUIRED) @Tags({"remote", "copy", "egress", "put", "sftp", "archive", "files"}) @@ -44,51 +42,46 @@ import org.apache.nifi.processors.standard.util.SFTPTransfer; @SeeAlso(GetSFTP.class) public class PutSFTP extends PutFileTransfer { - private List properties; - - @Override - protected void init(final ProcessorInitializationContext context) { - final List properties = new ArrayList<>(); - properties.add(FileTransfer.HOSTNAME); - properties.add(SFTPTransfer.PORT); - properties.add(FileTransfer.USERNAME); - properties.add(FileTransfer.PASSWORD); - properties.add(SFTPTransfer.PRIVATE_KEY_PATH); - properties.add(SFTPTransfer.PRIVATE_KEY_PASSPHRASE); - properties.add(FileTransfer.REMOTE_PATH); - properties.add(FileTransfer.CREATE_DIRECTORY); - properties.add(SFTPTransfer.DISABLE_DIRECTORY_LISTING); - properties.add(FileTransfer.BATCH_SIZE); - properties.add(FileTransfer.CONNECTION_TIMEOUT); - properties.add(FileTransfer.DATA_TIMEOUT); - properties.add(FileTransfer.CONFLICT_RESOLUTION); - properties.add(FileTransfer.REJECT_ZERO_BYTE); - properties.add(FileTransfer.DOT_RENAME); - properties.add(FileTransfer.TEMP_FILENAME); - properties.add(SFTPTransfer.HOST_KEY_FILE); - properties.add(FileTransfer.LAST_MODIFIED_TIME); - properties.add(FileTransfer.PERMISSIONS); - properties.add(FileTransfer.REMOTE_OWNER); - properties.add(FileTransfer.REMOTE_GROUP); - properties.add(SFTPTransfer.STRICT_HOST_KEY_CHECKING); - properties.add(SFTPTransfer.USE_KEEPALIVE_ON_TIMEOUT); - properties.add(FileTransfer.USE_COMPRESSION); - properties.add(SFTPTransfer.PROXY_CONFIGURATION_SERVICE); - properties.add(FTPTransfer.PROXY_TYPE); - properties.add(FTPTransfer.PROXY_HOST); - properties.add(FTPTransfer.PROXY_PORT); - properties.add(FTPTransfer.HTTP_PROXY_USERNAME); - properties.add(FTPTransfer.HTTP_PROXY_PASSWORD); - properties.add(SFTPTransfer.CIPHERS_ALLOWED); - properties.add(SFTPTransfer.KEY_ALGORITHMS_ALLOWED); - 
properties.add(SFTPTransfer.KEY_EXCHANGE_ALGORITHMS_ALLOWED); - properties.add(SFTPTransfer.MESSAGE_AUTHENTICATION_CODES_ALLOWED); - this.properties = Collections.unmodifiableList(properties); - } + private static final List PROPERTIES = List.of( + FileTransfer.HOSTNAME, + SFTPTransfer.PORT, + FileTransfer.USERNAME, + FileTransfer.PASSWORD, + SFTPTransfer.PRIVATE_KEY_PATH, + SFTPTransfer.PRIVATE_KEY_PASSPHRASE, + FileTransfer.REMOTE_PATH, + FileTransfer.CREATE_DIRECTORY, + SFTPTransfer.DISABLE_DIRECTORY_LISTING, + FileTransfer.BATCH_SIZE, + FileTransfer.CONNECTION_TIMEOUT, + FileTransfer.DATA_TIMEOUT, + FileTransfer.CONFLICT_RESOLUTION, + FileTransfer.REJECT_ZERO_BYTE, + FileTransfer.DOT_RENAME, + FileTransfer.TEMP_FILENAME, + SFTPTransfer.HOST_KEY_FILE, + FileTransfer.LAST_MODIFIED_TIME, + FileTransfer.PERMISSIONS, + FileTransfer.REMOTE_OWNER, + FileTransfer.REMOTE_GROUP, + SFTPTransfer.STRICT_HOST_KEY_CHECKING, + SFTPTransfer.USE_KEEPALIVE_ON_TIMEOUT, + FileTransfer.USE_COMPRESSION, + SFTPTransfer.PROXY_CONFIGURATION_SERVICE, + FTPTransfer.PROXY_TYPE, + FTPTransfer.PROXY_HOST, + FTPTransfer.PROXY_PORT, + FTPTransfer.HTTP_PROXY_USERNAME, + FTPTransfer.HTTP_PROXY_PASSWORD, + SFTPTransfer.CIPHERS_ALLOWED, + SFTPTransfer.KEY_ALGORITHMS_ALLOWED, + SFTPTransfer.KEY_EXCHANGE_ALGORITHMS_ALLOWED, + SFTPTransfer.MESSAGE_AUTHENTICATION_CODES_ALLOWED + ); @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSQL.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSQL.java index 243f14119e..938f8302d9 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSQL.java +++ 
b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSQL.java @@ -16,7 +16,6 @@ */ package org.apache.nifi.processors.standard; -import java.util.Optional; import org.apache.nifi.annotation.behavior.InputRequirement; import org.apache.nifi.annotation.behavior.InputRequirement.Requirement; import org.apache.nifi.annotation.behavior.ReadsAttribute; @@ -67,9 +66,9 @@ import java.util.BitSet; import java.util.Collection; import java.util.Comparator; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.function.BiFunction; @@ -181,6 +180,17 @@ public class PutSQL extends AbstractSessionFactoryProcessor { .defaultValue("false") .build(); + private static final List PROPERTIES = List.of( + CONNECTION_POOL, + SQL_STATEMENT, + SUPPORT_TRANSACTIONS, + AUTO_COMMIT, + TRANSACTION_TIMEOUT, + BATCH_SIZE, + OBTAIN_GENERATED_KEYS, + RollbackOnFailure.ROLLBACK_ON_FAILURE + ); + static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") .description("A FlowFile is routed to this relationship after the database is successfully updated") @@ -195,6 +205,12 @@ public class PutSQL extends AbstractSessionFactoryProcessor { + "such as an invalid query or an integrity constraint violation") .build(); + private static final Set RELATIONSHIPS = Set.of( + REL_SUCCESS, + REL_RETRY, + REL_FAILURE + ); + private static final String FRAGMENT_ID_ATTR = FragmentAttributes.FRAGMENT_ID.key(); private static final String FRAGMENT_INDEX_ATTR = FragmentAttributes.FRAGMENT_INDEX.key(); private static final String FRAGMENT_COUNT_ATTR = FragmentAttributes.FRAGMENT_COUNT.key(); @@ -205,16 +221,7 @@ public class PutSQL extends AbstractSessionFactoryProcessor { @Override protected List getSupportedPropertyDescriptors() { - final List properties = new ArrayList<>(); - 
properties.add(CONNECTION_POOL); - properties.add(SQL_STATEMENT); - properties.add(SUPPORT_TRANSACTIONS); - properties.add(AUTO_COMMIT); - properties.add(TRANSACTION_TIMEOUT); - properties.add(BATCH_SIZE); - properties.add(OBTAIN_GENERATED_KEYS); - properties.add(RollbackOnFailure.ROLLBACK_ON_FAILURE); - return properties; + return PROPERTIES; } @Override @@ -247,11 +254,7 @@ public class PutSQL extends AbstractSessionFactoryProcessor { @Override public Set getRelationships() { - final Set rels = new HashSet<>(); - rels.add(REL_SUCCESS); - rels.add(REL_RETRY); - rels.add(REL_FAILURE); - return rels; + return RELATIONSHIPS; } private static class FunctionContext extends RollbackOnFailure { @@ -285,7 +288,7 @@ public class PutSQL extends AbstractSessionFactoryProcessor { private final PartialFunctions.InitConnection initConnection = (c, s, fc, ffs) -> { final Connection connection = c.getProperty(CONNECTION_POOL).asControllerService(DBCPService.class) - .getConnection(ffs == null || ffs.isEmpty() ? emptyMap() : ffs.get(0).getAttributes()); + .getConnection(ffs == null || ffs.isEmpty() ? emptyMap() : ffs.getFirst().getAttributes()); try { fc.originalAutoCommit = connection.getAutoCommit(); final boolean autocommit = c.getProperty(AUTO_COMMIT).asBoolean(); @@ -337,7 +340,7 @@ public class PutSQL extends AbstractSessionFactoryProcessor { // Create a new PreparedStatement or reuse the one from the last group if that is the same. final StatementFlowFileEnclosure enclosure; - final StatementFlowFileEnclosure lastEnclosure = groups.isEmpty() ? null : groups.get(groups.size() - 1); + final StatementFlowFileEnclosure lastEnclosure = groups.isEmpty() ? null : groups.getLast(); if (lastEnclosure == null || !lastEnclosure.getSql().equals(sql)) { enclosure = new StatementFlowFileEnclosure(sql); @@ -366,7 +369,7 @@ public class PutSQL extends AbstractSessionFactoryProcessor { // Create a new PreparedStatement or reuse the one from the last group if that is the same. 
final StatementFlowFileEnclosure enclosure; - final StatementFlowFileEnclosure lastEnclosure = groups.isEmpty() ? null : groups.get(groups.size() - 1); + final StatementFlowFileEnclosure lastEnclosure = groups.isEmpty() ? null : groups.getLast(); if (lastEnclosure == null || !lastEnclosure.getSql().equals(sql)) { enclosure = new StatementFlowFileEnclosure(sql); @@ -452,7 +455,7 @@ public class PutSQL extends AbstractSessionFactoryProcessor { String url = "jdbc://unknown-host"; try { url = conn.getMetaData().getURL(); - } catch (final SQLException sqle) { + } catch (final SQLException ignored) { } // Emit a Provenance SEND event @@ -679,10 +682,10 @@ public class PutSQL extends AbstractSessionFactoryProcessor { /** * Pulls a batch of FlowFiles from the incoming queues. If no FlowFiles are available, returns null. * Otherwise, a List of FlowFiles will be returned. - * + *

* If all FlowFiles pulled are not eligible to be processed, the FlowFiles will be penalized and transferred back * to the input queue and an empty List will be returned. - * + *

* Otherwise, if the Support Fragmented Transactions property is true, all FlowFiles that belong to the same * transaction will be sorted in the order that they should be evaluated. * @@ -776,8 +779,7 @@ public class PutSQL extends AbstractSessionFactoryProcessor { session.read(flowFile, in -> StreamUtils.fillBuffer(in, buffer)); // Create the PreparedStatement to use for this FlowFile. - final String sql = new String(buffer, StandardCharsets.UTF_8); - return sql; + return new String(buffer, StandardCharsets.UTF_8); } /** @@ -1092,11 +1094,10 @@ public class PutSQL extends AbstractSessionFactoryProcessor { if (obj == this) { return false; } - if (!(obj instanceof StatementFlowFileEnclosure)) { + if (!(obj instanceof StatementFlowFileEnclosure other)) { return false; } - final StatementFlowFileEnclosure other = (StatementFlowFileEnclosure) obj; return sql.equals(other.sql); } } diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSyslog.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSyslog.java index 828ccb43cb..a379ba0935 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSyslog.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutSyslog.java @@ -16,19 +16,6 @@ */ package org.apache.nifi.processors.standard; -import java.nio.charset.Charset; -import java.time.Duration; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import java.util.concurrent.TimeUnit; -import java.util.regex.Matcher; -import java.util.regex.Pattern; -import javax.net.ssl.SSLContext; - import org.apache.commons.lang3.StringUtils; import 
org.apache.nifi.annotation.behavior.InputRequirement; import org.apache.nifi.annotation.behavior.TriggerWhenEmpty; @@ -42,14 +29,13 @@ import org.apache.nifi.components.PropertyValue; import org.apache.nifi.components.ValidationContext; import org.apache.nifi.components.ValidationResult; import org.apache.nifi.event.transport.EventSender; -import org.apache.nifi.event.transport.configuration.TransportProtocol; import org.apache.nifi.event.transport.configuration.LineEnding; +import org.apache.nifi.event.transport.configuration.TransportProtocol; import org.apache.nifi.event.transport.netty.StringNettyEventSenderFactory; import org.apache.nifi.expression.ExpressionLanguageScope; import org.apache.nifi.flowfile.FlowFile; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; @@ -57,6 +43,17 @@ import org.apache.nifi.ssl.SSLContextService; import org.apache.nifi.syslog.parsers.SyslogParser; import org.apache.nifi.util.StopWatch; +import javax.net.ssl.SSLContext; +import java.nio.charset.Charset; +import java.time.Duration; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + @InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED) @TriggerWhenEmpty @Tags({"syslog", "put", "udp", "tcp", "logs"}) @@ -152,6 +149,23 @@ public class PutSyslog extends AbstractSyslogProcessor { .dependsOn(PROTOCOL, TCP_VALUE) .build(); + private static final List PROPERTIES = List.of( + HOSTNAME, + PROTOCOL, + PORT, + MAX_SOCKET_SEND_BUFFER_SIZE, + SSL_CONTEXT_SERVICE, + IDLE_EXPIRATION, + TIMEOUT, + BATCH_SIZE, + CHARSET, + MSG_PRIORITY, + MSG_VERSION, + 
MSG_TIMESTAMP, + MSG_HOSTNAME, + MSG_BODY + ); + public static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") .description("FlowFiles that are sent successfully to Syslog are sent out this relationship.") @@ -165,46 +179,23 @@ public class PutSyslog extends AbstractSyslogProcessor { .description("FlowFiles that do not form a valid Syslog message are sent out this relationship.") .build(); - private Set relationships; - private List descriptors; + private static final Set RELATIONSHIPS = Set.of( + REL_SUCCESS, + REL_FAILURE, + REL_INVALID + ); private EventSender eventSender; private String transitUri; - @Override - protected void init(final ProcessorInitializationContext context) { - final List descriptors = new ArrayList<>(); - descriptors.add(HOSTNAME); - descriptors.add(PROTOCOL); - descriptors.add(PORT); - descriptors.add(MAX_SOCKET_SEND_BUFFER_SIZE); - descriptors.add(SSL_CONTEXT_SERVICE); - descriptors.add(IDLE_EXPIRATION); - descriptors.add(TIMEOUT); - descriptors.add(BATCH_SIZE); - descriptors.add(CHARSET); - descriptors.add(MSG_PRIORITY); - descriptors.add(MSG_VERSION); - descriptors.add(MSG_TIMESTAMP); - descriptors.add(MSG_HOSTNAME); - descriptors.add(MSG_BODY); - this.descriptors = Collections.unmodifiableList(descriptors); - - final Set relationships = new HashSet<>(); - relationships.add(REL_SUCCESS); - relationships.add(REL_FAILURE); - relationships.add(REL_INVALID); - this.relationships = Collections.unmodifiableSet(relationships); - } - @Override public Set getRelationships() { - return this.relationships; + return RELATIONSHIPS; } @Override public final List getSupportedPropertyDescriptors() { - return descriptors; + return PROPERTIES; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutTCP.java 
b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutTCP.java index cc2c5d1774..edcc80aa4a 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutTCP.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/PutTCP.java @@ -50,8 +50,6 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.nio.charset.Charset; -import java.util.Arrays; -import java.util.Collections; import java.util.List; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; @@ -74,17 +72,17 @@ public class PutTCP extends AbstractPutEventProcessor { .description("Specifies the strategy used for reading input FlowFiles and transmitting messages to the destination socket address") .required(true) .allowableValues(TransmissionStrategy.class) - .defaultValue(TransmissionStrategy.FLOWFILE_ORIENTED.getValue()) + .defaultValue(TransmissionStrategy.FLOWFILE_ORIENTED) .build(); static final PropertyDescriptor DEPENDENT_CHARSET = new PropertyDescriptor.Builder() .fromPropertyDescriptor(CHARSET) - .dependsOn(TRANSMISSION_STRATEGY, TransmissionStrategy.FLOWFILE_ORIENTED.getValue()) + .dependsOn(TRANSMISSION_STRATEGY, TransmissionStrategy.FLOWFILE_ORIENTED) .build(); static final PropertyDescriptor DEPENDENT_OUTGOING_MESSAGE_DELIMITER = new PropertyDescriptor.Builder() .fromPropertyDescriptor(OUTGOING_MESSAGE_DELIMITER) - .dependsOn(TRANSMISSION_STRATEGY, TransmissionStrategy.FLOWFILE_ORIENTED.getValue()) + .dependsOn(TRANSMISSION_STRATEGY, TransmissionStrategy.FLOWFILE_ORIENTED) .build(); static final PropertyDescriptor RECORD_READER = new PropertyDescriptor.Builder() @@ -93,7 +91,7 @@ public class PutTCP extends AbstractPutEventProcessor { .description("Specifies the Controller Service to use for reading 
Records from input FlowFiles") .identifiesControllerService(RecordReaderFactory.class) .required(true) - .dependsOn(TRANSMISSION_STRATEGY, TransmissionStrategy.RECORD_ORIENTED.getValue()) + .dependsOn(TRANSMISSION_STRATEGY, TransmissionStrategy.RECORD_ORIENTED) .build(); static final PropertyDescriptor RECORD_WRITER = new PropertyDescriptor.Builder() @@ -102,10 +100,10 @@ public class PutTCP extends AbstractPutEventProcessor { .description("Specifies the Controller Service to use for writing Records to the configured socket address") .identifiesControllerService(RecordSetWriterFactory.class) .required(true) - .dependsOn(TRANSMISSION_STRATEGY, TransmissionStrategy.RECORD_ORIENTED.getValue()) + .dependsOn(TRANSMISSION_STRATEGY, TransmissionStrategy.RECORD_ORIENTED) .build(); - private static final List ADDITIONAL_PROPERTIES = Collections.unmodifiableList(Arrays.asList( + private static final List ADDITIONAL_PROPERTIES = List.of( CONNECTION_PER_FLOWFILE, SSL_CONTEXT_SERVICE, TRANSMISSION_STRATEGY, @@ -113,7 +111,7 @@ public class PutTCP extends AbstractPutEventProcessor { DEPENDENT_CHARSET, RECORD_READER, RECORD_WRITER - )); + ); @Override protected List getAdditionalProperties() { @@ -128,7 +126,7 @@ public class PutTCP extends AbstractPutEventProcessor { return; } - final TransmissionStrategy transmissionStrategy = TransmissionStrategy.valueOf(context.getProperty(TRANSMISSION_STRATEGY).getValue()); + final TransmissionStrategy transmissionStrategy = context.getProperty(TRANSMISSION_STRATEGY).asAllowableValue(TransmissionStrategy.class); final StopWatch stopWatch = new StopWatch(true); try { final int recordCount; diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/QueryDatabaseTable.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/QueryDatabaseTable.java index 42e272637a..fbed15c0ea 100644 --- 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/QueryDatabaseTable.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/QueryDatabaseTable.java @@ -39,16 +39,13 @@ import org.apache.nifi.processors.standard.sql.SqlWriter; import org.apache.nifi.scheduling.SchedulingStrategy; import org.apache.nifi.util.db.JdbcCommon; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashSet; import java.util.List; import java.util.Set; import static org.apache.nifi.util.db.JdbcProperties.NORMALIZE_NAMES_FOR_AVRO; +import static org.apache.nifi.util.db.JdbcProperties.USE_AVRO_LOGICAL_TYPES; import static org.apache.nifi.util.db.JdbcProperties.VARIABLE_REGISTRY_ONLY_DEFAULT_PRECISION; import static org.apache.nifi.util.db.JdbcProperties.VARIABLE_REGISTRY_ONLY_DEFAULT_SCALE; -import static org.apache.nifi.util.db.JdbcProperties.USE_AVRO_LOGICAL_TYPES; @TriggerSerially @@ -90,36 +87,38 @@ import static org.apache.nifi.util.db.JdbcProperties.USE_AVRO_LOGICAL_TYPES; @DefaultSchedule(strategy = SchedulingStrategy.TIMER_DRIVEN, period = "1 min") public class QueryDatabaseTable extends AbstractQueryDatabaseTable { + static final PropertyDescriptor TABLE_NAME = new PropertyDescriptor.Builder() + .fromPropertyDescriptor(AbstractDatabaseFetchProcessor.TABLE_NAME) + .description("The name of the database table to be queried. 
When a custom query is used, this property is used to alias the query and appears as an attribute on the FlowFile.") + .build(); + + private static final List PROPERTIES = List.of( + DBCP_SERVICE, + DB_TYPE, + TABLE_NAME, + COLUMN_NAMES, + WHERE_CLAUSE, + SQL_QUERY, + MAX_VALUE_COLUMN_NAMES, + INITIAL_LOAD_STRATEGY, + QUERY_TIMEOUT, + FETCH_SIZE, + AUTO_COMMIT, + MAX_ROWS_PER_FLOW_FILE, + OUTPUT_BATCH_SIZE, + MAX_FRAGMENTS, + NORMALIZE_NAMES_FOR_AVRO, + TRANS_ISOLATION_LEVEL, + USE_AVRO_LOGICAL_TYPES, + VARIABLE_REGISTRY_ONLY_DEFAULT_PRECISION, + VARIABLE_REGISTRY_ONLY_DEFAULT_SCALE + ); + + private static final Set RELATIONSHIPS = Set.of(REL_SUCCESS); + public QueryDatabaseTable() { - final Set r = new HashSet<>(); - r.add(REL_SUCCESS); - relationships = Collections.unmodifiableSet(r); - - final List pds = new ArrayList<>(); - pds.add(DBCP_SERVICE); - pds.add(DB_TYPE); - pds.add(new PropertyDescriptor.Builder() - .fromPropertyDescriptor(TABLE_NAME) - .description("The name of the database table to be queried. 
When a custom query is used, this property is used to alias the query and appears as an attribute on the FlowFile.") - .build()); - pds.add(COLUMN_NAMES); - pds.add(WHERE_CLAUSE); - pds.add(SQL_QUERY); - pds.add(MAX_VALUE_COLUMN_NAMES); - pds.add(INITIAL_LOAD_STRATEGY); - pds.add(QUERY_TIMEOUT); - pds.add(FETCH_SIZE); - pds.add(AUTO_COMMIT); - pds.add(MAX_ROWS_PER_FLOW_FILE); - pds.add(OUTPUT_BATCH_SIZE); - pds.add(MAX_FRAGMENTS); - pds.add(NORMALIZE_NAMES_FOR_AVRO); - pds.add(TRANS_ISOLATION_LEVEL); - pds.add(USE_AVRO_LOGICAL_TYPES); - pds.add(VARIABLE_REGISTRY_ONLY_DEFAULT_PRECISION); - pds.add(VARIABLE_REGISTRY_ONLY_DEFAULT_SCALE); - - propDescriptors = Collections.unmodifiableList(pds); + relationships = RELATIONSHIPS; + propDescriptors = PROPERTIES; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/QueryDatabaseTableRecord.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/QueryDatabaseTableRecord.java index 65e00a8927..7e40f702e7 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/QueryDatabaseTableRecord.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/QueryDatabaseTableRecord.java @@ -43,9 +43,7 @@ import org.apache.nifi.scheduling.SchedulingStrategy; import org.apache.nifi.serialization.RecordSetWriterFactory; import org.apache.nifi.util.db.JdbcCommon; -import java.util.ArrayList; import java.util.Collections; -import java.util.HashSet; import java.util.List; import java.util.Set; @@ -170,6 +168,11 @@ import static org.apache.nifi.util.db.JdbcProperties.VARIABLE_REGISTRY_ONLY_DEFA ) public class QueryDatabaseTableRecord extends AbstractQueryDatabaseTable { + public static final PropertyDescriptor TABLE_NAME = new 
PropertyDescriptor.Builder() + .fromPropertyDescriptor(AbstractDatabaseFetchProcessor.TABLE_NAME) + .description("The name of the database table to be queried. When a custom query is used, this property is used to alias the query and appears as an attribute on the FlowFile.") + .build(); + public static final PropertyDescriptor RECORD_WRITER_FACTORY = new PropertyDescriptor.Builder() .name("qdbtr-record-writer") .displayName("Record Writer") @@ -188,36 +191,33 @@ public class QueryDatabaseTableRecord extends AbstractQueryDatabaseTable { .required(true) .build(); + private static final List PROPERTIES = List.of( + DBCP_SERVICE, + DB_TYPE, + TABLE_NAME, + COLUMN_NAMES, + WHERE_CLAUSE, + SQL_QUERY, + RECORD_WRITER_FACTORY, + MAX_VALUE_COLUMN_NAMES, + INITIAL_LOAD_STRATEGY, + QUERY_TIMEOUT, + FETCH_SIZE, + AUTO_COMMIT, + MAX_ROWS_PER_FLOW_FILE, + OUTPUT_BATCH_SIZE, + MAX_FRAGMENTS, + NORMALIZE_NAMES, + USE_AVRO_LOGICAL_TYPES, + VARIABLE_REGISTRY_ONLY_DEFAULT_PRECISION, + VARIABLE_REGISTRY_ONLY_DEFAULT_SCALE + ); + + private static final Set RELATIONSHIPS = Set.of(REL_SUCCESS); + public QueryDatabaseTableRecord() { - final Set r = new HashSet<>(); - r.add(REL_SUCCESS); - relationships = Collections.unmodifiableSet(r); - - final List pds = new ArrayList<>(); - pds.add(DBCP_SERVICE); - pds.add(DB_TYPE); - pds.add(new PropertyDescriptor.Builder() - .fromPropertyDescriptor(TABLE_NAME) - .description("The name of the database table to be queried. 
When a custom query is used, this property is used to alias the query and appears as an attribute on the FlowFile.") - .build()); - pds.add(COLUMN_NAMES); - pds.add(WHERE_CLAUSE); - pds.add(SQL_QUERY); - pds.add(RECORD_WRITER_FACTORY); - pds.add(MAX_VALUE_COLUMN_NAMES); - pds.add(INITIAL_LOAD_STRATEGY); - pds.add(QUERY_TIMEOUT); - pds.add(FETCH_SIZE); - pds.add(AUTO_COMMIT); - pds.add(MAX_ROWS_PER_FLOW_FILE); - pds.add(OUTPUT_BATCH_SIZE); - pds.add(MAX_FRAGMENTS); - pds.add(NORMALIZE_NAMES); - pds.add(USE_AVRO_LOGICAL_TYPES); - pds.add(VARIABLE_REGISTRY_ONLY_DEFAULT_PRECISION); - pds.add(VARIABLE_REGISTRY_ONLY_DEFAULT_SCALE); - - propDescriptors = Collections.unmodifiableList(pds); + relationships = RELATIONSHIPS; + propDescriptors = PROPERTIES; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/QueryRecord.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/QueryRecord.java index 6c48986599..5b761d72a3 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/QueryRecord.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/QueryRecord.java @@ -228,6 +228,15 @@ public class QueryRecord extends AbstractProcessor { .required(true) .build(); + private static final List PROPERTIES = List.of( + RECORD_READER_FACTORY, + RECORD_WRITER_FACTORY, + INCLUDE_ZERO_RECORD_FLOWFILES, + CACHE_SCHEMA, + DEFAULT_PRECISION, + DEFAULT_SCALE + ); + public static final Relationship REL_ORIGINAL = new Relationship.Builder() .name("original") .description("The original FlowFile is routed to this relationship") @@ -239,7 +248,6 @@ public class QueryRecord extends AbstractProcessor { + "be routed to this relationship") .build(); - private List properties; private final Set 
relationships = Collections.synchronizedSet(new HashSet<>()); private final Cache, BlockingQueue> statementQueues = Caffeine.newBuilder() @@ -249,14 +257,6 @@ public class QueryRecord extends AbstractProcessor { @Override protected void init(final ProcessorInitializationContext context) { - this.properties = List.of( - RECORD_READER_FACTORY, - RECORD_WRITER_FACTORY, - INCLUDE_ZERO_RECORD_FLOWFILES, - CACHE_SCHEMA, - DEFAULT_PRECISION, - DEFAULT_SCALE); - relationships.add(REL_FAILURE); relationships.add(REL_ORIGINAL); } @@ -268,7 +268,7 @@ public class QueryRecord extends AbstractProcessor { @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RemoveRecordField.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RemoveRecordField.java index ce8da4ed74..a519178f9b 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RemoveRecordField.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RemoveRecordField.java @@ -17,10 +17,6 @@ package org.apache.nifi.processors.standard; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.List; import org.apache.nifi.annotation.behavior.DynamicProperty; import org.apache.nifi.annotation.behavior.InputRequirement; import org.apache.nifi.annotation.behavior.InputRequirement.Requirement; @@ -45,6 +41,11 @@ import org.apache.nifi.record.path.util.RecordPathCache; import org.apache.nifi.record.path.validation.RecordPathValidator; import org.apache.nifi.serialization.record.Record; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; 
+import java.util.Set; + @SideEffectFree @SupportsBatching @@ -117,7 +118,7 @@ public class RemoveRecordField extends AbstractRecordProcessor { return validationResults; } - return Collections.singleton(new ValidationResult.Builder() + return Set.of(new ValidationResult.Builder() .subject("User-defined Properties") .valid(false) .explanation("at least one RecordPath must be specified") diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RenameRecordField.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RenameRecordField.java index c1dd5ab339..91d2c6a168 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RenameRecordField.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RenameRecordField.java @@ -17,12 +17,6 @@ package org.apache.nifi.processors.standard; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; import org.apache.nifi.annotation.behavior.DynamicProperty; import org.apache.nifi.annotation.behavior.InputRequirement; import org.apache.nifi.annotation.behavior.SideEffectFree; @@ -48,6 +42,14 @@ import org.apache.nifi.record.path.validation.RecordPathPropertyNameValidator; import org.apache.nifi.serialization.record.Record; import org.apache.nifi.serialization.record.util.DataTypeUtils; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; + @SideEffectFree @SupportsBatching @InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED) @@ -147,7 +149,7 @@ public class RenameRecordField extends 
AbstractRecordProcessor { return Collections.emptyList(); } - return Collections.singleton(new ValidationResult.Builder() + return Set.of(new ValidationResult.Builder() .subject("User-defined Properties") .valid(false) .explanation("At least one RecordPath must be specified") diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java index dcc0cf2b74..5805a01fd2 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceText.java @@ -45,7 +45,6 @@ import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.DataUnit; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.io.StreamCallback; @@ -63,9 +62,7 @@ import java.nio.BufferOverflowException; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -144,7 +141,7 @@ import java.util.regex.Pattern; ) public class ReplaceText extends AbstractProcessor { - private static Pattern REPLACEMENT_NORMALIZATION_PATTERN = Pattern.compile("(\\$\\D)"); + private static final Pattern REPLACEMENT_NORMALIZATION_PATTERN = Pattern.compile("(\\$\\D)"); // Constants public static final String LINE_BY_LINE = "Line-by-Line"; @@ -193,7 
+190,6 @@ public class ReplaceText extends AbstractProcessor { "Substitute variable references (specified in ${var} form) using FlowFile attributes for looking up the replacement value by variable name. " + "When this strategy is chosen, both the and properties are ignored."); - public static final PropertyDescriptor REPLACEMENT_STRATEGY = new PropertyDescriptor.Builder() .name("Replacement Strategy") .description("The strategy for how and what to replace within the FlowFile's text content.") @@ -278,7 +274,17 @@ public class ReplaceText extends AbstractProcessor { .required(false) .build(); - + private static final List PROPERTIES = List.of( + REPLACEMENT_STRATEGY, + SEARCH_VALUE, + REPLACEMENT_VALUE, + PREPEND_TEXT, + APPEND_TEXT, + CHARACTER_SET, + MAX_BUFFER_SIZE, + EVALUATION_MODE, + LINE_BY_LINE_EVALUATION_MODE + ); // Relationships public static final Relationship REL_SUCCESS = new Relationship.Builder() @@ -291,38 +297,21 @@ public class ReplaceText extends AbstractProcessor { .description("FlowFiles that could not be updated are routed to this relationship") .build(); - private List properties; - private Set relationships; + private static final Set RELATIONSHIPS = Set.of( + REL_SUCCESS, + REL_FAILURE + ); + private ReplacementStrategyExecutor replacementStrategyExecutor; - @Override - protected void init(final ProcessorInitializationContext context) { - final List properties = new ArrayList<>(); - properties.add(REPLACEMENT_STRATEGY); - properties.add(SEARCH_VALUE); - properties.add(REPLACEMENT_VALUE); - properties.add(PREPEND_TEXT); - properties.add(APPEND_TEXT); - properties.add(CHARACTER_SET); - properties.add(MAX_BUFFER_SIZE); - properties.add(EVALUATION_MODE); - properties.add(LINE_BY_LINE_EVALUATION_MODE); - this.properties = Collections.unmodifiableList(properties); - - final Set relationships = new HashSet<>(); - relationships.add(REL_SUCCESS); - relationships.add(REL_FAILURE); - this.relationships = Collections.unmodifiableSet(relationships); - } 
- @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceTextWithMapping.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceTextWithMapping.java index 9fc188194d..36b1b2f305 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceTextWithMapping.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ReplaceTextWithMapping.java @@ -36,7 +36,6 @@ import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.DataUnit; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.io.StreamCallback; @@ -54,9 +53,7 @@ import java.io.OutputStream; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -120,6 +117,15 @@ public class ReplaceTextWithMapping extends AbstractProcessor { .defaultValue("1 MB") .build(); + private static final List PROPERTIES = List.of( + REGEX, + MATCHING_GROUP_FOR_LOOKUP_KEY, + MAPPING_FILE, + MAPPING_FILE_REFRESH_INTERVAL, + CHARACTER_SET, + MAX_BUFFER_SIZE + ); + public static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") .description("FlowFiles 
that have been successfully updated are routed to this relationship, as well as FlowFiles whose content does not match the given Regular Expression") @@ -129,10 +135,12 @@ public class ReplaceTextWithMapping extends AbstractProcessor { .description("FlowFiles that could not be updated are routed to this relationship") .build(); - private final Pattern backReferencePattern = Pattern.compile("[^\\\\]\\$(\\d+)"); + private static final Set RELATIONSHIPS = Set.of( + REL_SUCCESS, + REL_FAILURE + ); - private List properties; - private Set relationships; + private final Pattern backReferencePattern = Pattern.compile("[^\\\\]\\$(\\d+)"); private final ReentrantLock processorLock = new ReentrantLock(); private final AtomicLong lastModified = new AtomicLong(0L); @@ -158,31 +166,14 @@ public class ReplaceTextWithMapping extends AbstractProcessor { return errors; } - @Override - protected void init(final ProcessorInitializationContext context) { - final List properties = new ArrayList<>(); - properties.add(REGEX); - properties.add(MATCHING_GROUP_FOR_LOOKUP_KEY); - properties.add(MAPPING_FILE); - properties.add(MAPPING_FILE_REFRESH_INTERVAL); - properties.add(CHARACTER_SET); - properties.add(MAX_BUFFER_SIZE); - this.properties = Collections.unmodifiableList(properties); - - final Set relationships = new HashSet<>(); - relationships.add(REL_SUCCESS); - relationships.add(REL_FAILURE); - this.relationships = Collections.unmodifiableSet(relationships); - } - @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override @@ -303,7 +294,7 @@ public class ReplaceTextWithMapping extends AbstractProcessor { } public Map getMapping() { - return Collections.unmodifiableMap(mapping); + return Map.copyOf(mapping); } public boolean isConfigured() { diff --git 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RetryFlowFile.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RetryFlowFile.java index 66bd13b384..2a053a4f3c 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RetryFlowFile.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RetryFlowFile.java @@ -34,15 +34,11 @@ import org.apache.nifi.logging.LogLevel; import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; import org.apache.nifi.util.StringUtils; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashSet; import java.util.List; import java.util.Set; @@ -74,13 +70,6 @@ import java.util.Set; "the 'retries_exceeded' relationship", expressionLanguageScope = ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) public class RetryFlowFile extends AbstractProcessor { - private List properties; - private Set relationships; - private String retryAttribute; - private Boolean penalizeRetried; - private Boolean failOnOverwrite; - private String reuseMode; - private String lastRetriedBy; public static final PropertyDescriptor RETRY_ATTRIBUTE = new PropertyDescriptor.Builder() .name("retry-attribute") @@ -151,6 +140,14 @@ public class RetryFlowFile extends AbstractProcessor { .defaultValue(FAIL_ON_REUSE.getValue()) .build(); + private static final List PROPERTIES = List.of( + RETRY_ATTRIBUTE, + MAXIMUM_RETRIES, + PENALIZE_RETRIED, + 
FAIL_ON_OVERWRITE, + REUSE_MODE + ); + public static final Relationship RETRY = new Relationship.Builder() .name("retry") .description("Input FlowFile has not exceeded the configured maximum retry count, pass this " + @@ -170,26 +167,21 @@ public class RetryFlowFile extends AbstractProcessor { .autoTerminateDefault(true) .build(); + private static final Set RELATIONSHIPS = Set.of( + RETRY, + RETRIES_EXCEEDED, + FAILURE + ); + + private String retryAttribute; + private Boolean penalizeRetried; + private Boolean failOnOverwrite; + private String reuseMode; + private String lastRetriedBy; + @Override public Set getRelationships() { - return relationships; - } - - @Override - protected void init(ProcessorInitializationContext context) { - List props = new ArrayList<>(); - props.add(RETRY_ATTRIBUTE); - props.add(MAXIMUM_RETRIES); - props.add(PENALIZE_RETRIED); - props.add(FAIL_ON_OVERWRITE); - props.add(REUSE_MODE); - this.properties = Collections.unmodifiableList(props); - - Set rels = new HashSet<>(); - rels.add(RETRY); - rels.add(RETRIES_EXCEEDED); - rels.add(FAILURE); - this.relationships = Collections.unmodifiableSet(rels); + return RELATIONSHIPS; } @Override @@ -207,7 +199,7 @@ public class RetryFlowFile extends AbstractProcessor { @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @OnScheduled @@ -231,7 +223,7 @@ public class RetryFlowFile extends AbstractProcessor { try { currentRetry = (null == retryAttributeValue) ? 
1 - : Integer.valueOf(retryAttributeValue.trim()) + 1; + : Integer.parseInt(retryAttributeValue.trim()) + 1; } catch (NumberFormatException ex) { // Configured to fail if this was not a number if (failOnOverwrite) { diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnAttribute.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnAttribute.java index ea3c9c0608..7e57f669c0 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnAttribute.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnAttribute.java @@ -41,12 +41,9 @@ import org.apache.nifi.logging.ComponentLog; import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.util.StandardValidators; -import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; @@ -195,6 +192,8 @@ public class RouteOnAttribute extends AbstractProcessor { .defaultValue(ROUTE_PROPERTY_NAME.getValue()) .build(); + private static final List PROPERTIES = List.of(ROUTE_STRATEGY); + public static final Relationship REL_NO_MATCH = new Relationship.Builder() .name("unmatched") .description("FlowFiles that do not match any user-define expression will be routed here") @@ -204,8 +203,7 @@ public class RouteOnAttribute extends AbstractProcessor { .description("FlowFiles will be routed to 'match' if one or all Expressions match, depending on the configuration of the Routing Strategy property") .build(); - 
private AtomicReference> relationships = new AtomicReference<>(); - private List properties; + private final AtomicReference> relationships = new AtomicReference<>(Set.of(REL_NO_MATCH)); private volatile String configuredRouteStrategy = ROUTE_STRATEGY.getDefaultValue(); private volatile Set dynamicPropertyNames = new HashSet<>(); @@ -215,17 +213,6 @@ public class RouteOnAttribute extends AbstractProcessor { */ private volatile Map propertyMap = new HashMap<>(); - @Override - protected void init(final ProcessorInitializationContext context) { - final Set set = new HashSet<>(); - set.add(REL_NO_MATCH); - relationships = new AtomicReference<>(set); - - final List properties = new ArrayList<>(); - properties.add(ROUTE_STRATEGY); - this.properties = Collections.unmodifiableList(properties); - } - @Override public Set getRelationships() { return relationships.get(); @@ -233,7 +220,7 @@ public class RouteOnAttribute extends AbstractProcessor { @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @Override @@ -259,7 +246,7 @@ public class RouteOnAttribute extends AbstractProcessor { newDynamicPropertyNames.add(descriptor.getName()); } - this.dynamicPropertyNames = Collections.unmodifiableSet(newDynamicPropertyNames); + this.dynamicPropertyNames = Set.copyOf(newDynamicPropertyNames); } // formulate the new set of Relationships diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnContent.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnContent.java index 6069db2e5c..ff1509b6a3 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnContent.java +++ 
b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteOnContent.java @@ -16,19 +16,6 @@ */ package org.apache.nifi.processors.standard; -import java.io.IOException; -import java.io.InputStream; -import java.nio.charset.Charset; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicReference; -import java.util.regex.Pattern; import org.apache.nifi.annotation.behavior.DynamicProperty; import org.apache.nifi.annotation.behavior.DynamicRelationship; import org.apache.nifi.annotation.behavior.InputRequirement; @@ -46,12 +33,23 @@ import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.DataUnit; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.io.InputStreamCallback; import org.apache.nifi.processor.util.StandardValidators; import org.apache.nifi.stream.io.StreamUtils; +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.Charset; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; +import java.util.regex.Pattern; + @SideEffectFree @SupportsBatching @InputRequirement(Requirement.INPUT_REQUIRED) @@ -96,30 +94,22 @@ public class RouteOnContent extends AbstractProcessor { .defaultValue("UTF-8") .build(); + private static final List PROPERTIES = List.of( + MATCH_REQUIREMENT, + CHARACTER_SET, + BUFFER_SIZE + ); + public static final Relationship REL_NO_MATCH = new 
Relationship.Builder() .name("unmatched") .description("FlowFiles that do not match any of the user-supplied regular expressions will be routed to this relationship") .build(); - private final AtomicReference> relationships = new AtomicReference<>(); - private List properties; - - @Override - protected void init(final ProcessorInitializationContext context) { - final Set relationships = new HashSet<>(); - relationships.add(REL_NO_MATCH); - this.relationships.set(Collections.unmodifiableSet(relationships)); - - final List properties = new ArrayList<>(); - properties.add(MATCH_REQUIREMENT); - properties.add(CHARACTER_SET); - properties.add(BUFFER_SIZE); - this.properties = Collections.unmodifiableList(properties); - } + private final AtomicReference> relationships = new AtomicReference<>(Set.of(REL_NO_MATCH)); @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteText.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteText.java index f48253b758..19ad4c254e 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteText.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/RouteText.java @@ -44,7 +44,6 @@ import org.apache.nifi.logging.ComponentLog; import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.io.InputStreamCallback; import org.apache.nifi.processor.io.OutputStreamCallback; @@ -131,7 +130,6 @@ 
public class RouteText extends AbstractProcessor { private static final String containsRegularExpressionValue = "Contains Regular Expression"; private static final String satisfiesExpression = "Satisfies Expression"; - public static final AllowableValue ROUTE_TO_MATCHING_PROPERTY_NAME = new AllowableValue(routePropertyNameValue, routePropertyNameValue, "Lines will be routed to each relationship whose corresponding expression evaluates to 'true'"); public static final AllowableValue ROUTE_TO_MATCHED_WHEN_ALL_PROPERTIES_MATCH = new AllowableValue(routeAllMatchValue, routeAllMatchValue, @@ -212,6 +210,15 @@ public class RouteText extends AbstractProcessor { .defaultValue("UTF-8") .build(); + private static final List PROPERTIES = List.of( + ROUTE_STRATEGY, + MATCH_STRATEGY, + CHARACTER_SET, + TRIM_WHITESPACE, + IGNORE_CASE, + GROUPING_REGEX + ); + public static final Relationship REL_ORIGINAL = new Relationship.Builder() .name("original") .description("The original input file will be routed to this destination when the lines have been successfully routed to 1 or more relationships") @@ -225,10 +232,10 @@ public class RouteText extends AbstractProcessor { .description("Data that satisfies the required user-defined rules will be routed to this Relationship") .build(); - private static Group EMPTY_GROUP = new Group(Collections.emptyList()); + private static final Group EMPTY_GROUP = new Group(Collections.emptyList()); - private AtomicReference> relationships = new AtomicReference<>(); - private List properties; + private final AtomicReference> relationships = + new AtomicReference<>(Set.of(REL_ORIGINAL, REL_NO_MATCH)); private volatile String configuredRouteStrategy = ROUTE_STRATEGY.getDefaultValue(); private volatile Set dynamicPropertyNames = new HashSet<>(); @@ -255,23 +262,6 @@ public class RouteText extends AbstractProcessor { r -> ignoreCase ? 
Pattern.compile(r, Pattern.CASE_INSENSITIVE) : Pattern.compile(r)); } - @Override - protected void init(final ProcessorInitializationContext context) { - final Set set = new HashSet<>(); - set.add(REL_ORIGINAL); - set.add(REL_NO_MATCH); - relationships = new AtomicReference<>(set); - - final List properties = new ArrayList<>(); - properties.add(ROUTE_STRATEGY); - properties.add(MATCH_STRATEGY); - properties.add(CHARACTER_SET); - properties.add(TRIM_WHITESPACE); - properties.add(IGNORE_CASE); - properties.add(GROUPING_REGEX); - this.properties = Collections.unmodifiableList(properties); - } - @Override public Set getRelationships() { return relationships.get(); @@ -279,7 +269,7 @@ public class RouteText extends AbstractProcessor { @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @Override @@ -309,7 +299,7 @@ public class RouteText extends AbstractProcessor { newDynamicPropertyNames.add(descriptor.getName()); } - this.dynamicPropertyNames = Collections.unmodifiableSet(newDynamicPropertyNames); + this.dynamicPropertyNames = Set.copyOf(newDynamicPropertyNames); } // formulate the new set of Relationships @@ -535,12 +525,11 @@ public class RouteText extends AbstractProcessor { final Group group = flowFileEntry.getKey(); final FlowFile flowFile = flowFileEntry.getValue(); - final Map attributes = new HashMap<>(2); - attributes.put(ROUTE_ATTRIBUTE_KEY, relationship.getName()); - attributes.put(GROUP_ATTRIBUTE_KEY, StringUtils.join(group.getCapturedValues(), ", ")); - logger.info("Created {} from {}; routing to relationship {}", flowFile, originalFlowFile, relationship.getName()); - FlowFile updatedFlowFile = session.putAllAttributes(flowFile, attributes); + FlowFile updatedFlowFile = session.putAllAttributes(flowFile, Map.of( + ROUTE_ATTRIBUTE_KEY, relationship.getName(), + GROUP_ATTRIBUTE_KEY, StringUtils.join(group.getCapturedValues(), ", ") + )); session.getProvenanceReporter().route(updatedFlowFile, 
entry.getKey()); session.transfer(updatedFlowFile, entry.getKey()); } diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SampleRecord.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SampleRecord.java index 9e5d0d1d6b..1e229bf763 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SampleRecord.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SampleRecord.java @@ -16,19 +16,6 @@ */ package org.apache.nifi.processors.standard; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Random; -import java.util.Set; -import java.util.regex.Matcher; -import java.util.regex.Pattern; import org.apache.commons.lang3.Range; import org.apache.nifi.annotation.behavior.InputRequirement; import org.apache.nifi.annotation.behavior.SideEffectFree; @@ -60,6 +47,18 @@ import org.apache.nifi.serialization.record.Record; import org.apache.nifi.serialization.record.RecordSchema; import org.apache.nifi.util.StringUtils; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.Set; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + @SideEffectFree @SupportsBatching @Tags({"record", "sample", "reservoir", "range", "interval"}) @@ -96,7 +95,6 @@ public class SampleRecord extends AbstractProcessor { private static final Pattern RANGE_PATTERN = 
Pattern.compile("^([0-9]+)?(-)?([0-9]+)?"); private static final Pattern INTERVAL_PATTERN = Pattern.compile("([0-9]+)?(-)?([0-9]+)?(?:,|$)"); - static final PropertyDescriptor RECORD_READER_FACTORY = new PropertyDescriptor.Builder() .name("record-reader") .displayName("Record Reader") @@ -176,6 +174,17 @@ public class SampleRecord extends AbstractProcessor { .dependsOn(SAMPLING_STRATEGY, PROBABILISTIC_SAMPLING, RESERVOIR_SAMPLING) .build(); + private static final List PROPERTIES = List.of( + RECORD_READER_FACTORY, + RECORD_WRITER_FACTORY, + SAMPLING_STRATEGY, + SAMPLING_INTERVAL, + SAMPLING_RANGE, + SAMPLING_PROBABILITY, + RESERVOIR_SIZE, + RANDOM_SEED + ); + public static final Relationship REL_ORIGINAL = new Relationship.Builder() .name("original") .description("The original FlowFile is routed to this relationship if sampling is successful") @@ -192,36 +201,20 @@ public class SampleRecord extends AbstractProcessor { + "is not valid), the original FlowFile will be routed to this relationship") .build(); - private static final List properties; - private static final Set relationships; - - static { - final List props = new ArrayList<>(); - props.add(RECORD_READER_FACTORY); - props.add(RECORD_WRITER_FACTORY); - props.add(SAMPLING_STRATEGY); - props.add(SAMPLING_INTERVAL); - props.add(SAMPLING_RANGE); - props.add(SAMPLING_PROBABILITY); - props.add(RESERVOIR_SIZE); - props.add(RANDOM_SEED); - properties = Collections.unmodifiableList(props); - - final Set r = new HashSet<>(); - r.add(REL_SUCCESS); - r.add(REL_FAILURE); - r.add(REL_ORIGINAL); - relationships = Collections.unmodifiableSet(r); - } + private static final Set RELATIONSHIPS = Set.of( + REL_SUCCESS, + REL_FAILURE, + REL_ORIGINAL + ); @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @Override diff --git 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanAttribute.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanAttribute.java index da50a9fb55..41779af18b 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanAttribute.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanAttribute.java @@ -32,7 +32,6 @@ import org.apache.nifi.logging.ComponentLog; import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; @@ -44,8 +43,6 @@ import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.nio.file.Paths; -import java.util.ArrayList; -import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -98,13 +95,12 @@ public class ScanAttribute extends AbstractProcessor { .addValidator(StandardValidators.createRegexValidator(0, 1, false)) .build(); - private List properties; - private Set relationships; - - private volatile Pattern dictionaryFilterPattern = null; - private volatile Pattern attributePattern = null; - private volatile Set dictionaryTerms = null; - private volatile SynchronousFileWatcher fileWatcher = null; + private static final List PROPERTIES = List.of( + DICTIONARY_FILE, + ATTRIBUTE_PATTERN, + MATCHING_CRITERIA, + DICTIONARY_FILTER + ); public static final Relationship REL_MATCHED = new Relationship.Builder() .name("matched") @@ -115,29 +111,24 @@ 
public class ScanAttribute extends AbstractProcessor { .description("FlowFiles whose attributes are not found in the dictionary will be routed to this relationship") .build(); - @Override - protected void init(final ProcessorInitializationContext context) { - final List properties = new ArrayList<>(); - properties.add(DICTIONARY_FILE); - properties.add(ATTRIBUTE_PATTERN); - properties.add(MATCHING_CRITERIA); - properties.add(DICTIONARY_FILTER); - this.properties = Collections.unmodifiableList(properties); + private static final Set RELATIONSHIPS = Set.of( + REL_MATCHED, + REL_UNMATCHED + ); - final Set relationships = new HashSet<>(); - relationships.add(REL_MATCHED); - relationships.add(REL_UNMATCHED); - this.relationships = Collections.unmodifiableSet(relationships); - } + private volatile Pattern dictionaryFilterPattern = null; + private volatile Pattern attributePattern = null; + private volatile Set dictionaryTerms = null; + private volatile SynchronousFileWatcher fileWatcher = null; @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @OnScheduled @@ -184,7 +175,7 @@ public class ScanAttribute extends AbstractProcessor { } } - return Collections.unmodifiableSet(terms); + return Set.copyOf(terms); } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanContent.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanContent.java index b70de18f05..22fe514c94 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanContent.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ScanContent.java @@ -31,7 +31,6 
@@ import org.apache.nifi.logging.ComponentLog; import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.io.InputStreamCallback; @@ -52,8 +51,6 @@ import java.io.InputStreamReader; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.nio.file.Paths; -import java.util.ArrayList; -import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; @@ -89,6 +86,11 @@ public class ScanContent extends AbstractProcessor { .defaultValue(TEXT_ENCODING) .build(); + private static final List PROPERTIES = List.of( + DICTIONARY, + DICTIONARY_ENCODING + ); + public static final Relationship REL_MATCH = new Relationship.Builder() .name("matched") .description("FlowFiles that match at least one " @@ -100,36 +102,25 @@ public class ScanContent extends AbstractProcessor { + "term in the dictionary are routed to this relationship") .build(); + private static final Set RELATIONSHIPS = Set.of( + REL_MATCH, + REL_NO_MATCH + ); + public static final Charset UTF8 = StandardCharsets.UTF_8; private final AtomicReference fileWatcherRef = new AtomicReference<>(); private final AtomicReference> searchRef = new AtomicReference<>(); private final ReentrantLock dictionaryUpdateLock = new ReentrantLock(); - private List properties; - private Set relationships; - - @Override - protected void init(final ProcessorInitializationContext context) { - final List properties = new ArrayList<>(); - properties.add(DICTIONARY); - properties.add(DICTIONARY_ENCODING); - this.properties = Collections.unmodifiableList(properties); - - final Set relationships = new HashSet<>(); - relationships.add(REL_MATCH); - relationships.add(REL_NO_MATCH); - 
this.relationships = Collections.unmodifiableSet(relationships); - } - @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override @@ -263,7 +254,7 @@ public class ScanContent extends AbstractProcessor { if (nextLine == null || nextLine.isEmpty()) { return null; } - return new SearchTerm<>(nextLine.getBytes("UTF-8")); + return new SearchTerm<>(nextLine.getBytes(StandardCharsets.UTF_8)); } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SegmentContent.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SegmentContent.java index 75b5b9c04a..10a2020b0a 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SegmentContent.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SegmentContent.java @@ -16,16 +16,6 @@ */ package org.apache.nifi.processors.standard; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.LinkedHashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.UUID; - import org.apache.nifi.annotation.behavior.InputRequirement; import org.apache.nifi.annotation.behavior.InputRequirement.Requirement; import org.apache.nifi.annotation.behavior.SideEffectFree; @@ -44,10 +34,16 @@ import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.DataUnit; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.Relationship; 
import org.apache.nifi.processor.util.StandardValidators; +import java.util.HashMap; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.UUID; + @SideEffectFree @SupportsBatching @Tags({"segment", "split"}) @@ -82,6 +78,8 @@ public class SegmentContent extends AbstractProcessor { .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) .build(); + private static final List PROPERTIES = List.of(SIZE); + public static final Relationship REL_SEGMENTS = new Relationship.Builder() .name("segments") .description("All segments will be sent to this relationship. If the file was small enough that it was not segmented, " @@ -92,29 +90,19 @@ public class SegmentContent extends AbstractProcessor { .description("The original FlowFile will be sent to this relationship") .build(); - private Set relationships; - private List propertyDescriptors; - - @Override - protected void init(final ProcessorInitializationContext context) { - final Set relationships = new HashSet<>(); - relationships.add(REL_SEGMENTS); - relationships.add(REL_ORIGINAL); - this.relationships = Collections.unmodifiableSet(relationships); - - final List descriptors = new ArrayList<>(); - descriptors.add(SIZE); - this.propertyDescriptors = Collections.unmodifiableList(descriptors); - } + private static final Set RELATIONSHIPS = Set.of( + REL_SEGMENTS, + REL_ORIGINAL + ); @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override protected List getSupportedPropertyDescriptors() { - return propertyDescriptors; + return PROPERTIES; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitContent.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitContent.java index 8e7629b5a6..5328e7435e 100644 --- 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitContent.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitContent.java @@ -16,20 +16,6 @@ */ package org.apache.nifi.processors.standard; -import java.io.BufferedInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.UUID; -import java.util.concurrent.atomic.AtomicReference; import org.apache.commons.codec.DecoderException; import org.apache.commons.codec.binary.Hex; import org.apache.nifi.annotation.behavior.InputRequirement; @@ -56,13 +42,25 @@ import org.apache.nifi.logging.ComponentLog; import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.io.InputStreamCallback; import org.apache.nifi.processor.util.StandardValidators; import org.apache.nifi.util.NaiveSearchRingBuffer; import org.apache.nifi.util.Tuple; +import java.io.BufferedInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.UUID; +import java.util.concurrent.atomic.AtomicReference; + @SideEffectFree @SupportsBatching @Tags({"content", "split", "binary"}) @@ -119,6 +117,13 @@ public class SplitContent extends AbstractProcessor { 
.defaultValue(TRAILING_POSITION.getValue()) .build(); + private static final List PROPERTIES = List.of( + FORMAT, + BYTE_SEQUENCE, + KEEP_SEQUENCE, + BYTE_SEQUENCE_LOCATION + ); + public static final Relationship REL_SPLITS = new Relationship.Builder() .name("splits") .description("All Splits will be routed to the splits relationship") @@ -128,34 +133,21 @@ public class SplitContent extends AbstractProcessor { .description("The original file") .build(); - private Set relationships; - private List properties; + private static final Set RELATIONSHIPS = Set.of( + REL_SPLITS, + REL_ORIGINAL + ); private final AtomicReference byteSequence = new AtomicReference<>(); - @Override - protected void init(final ProcessorInitializationContext context) { - final Set relationships = new HashSet<>(); - relationships.add(REL_SPLITS); - relationships.add(REL_ORIGINAL); - this.relationships = Collections.unmodifiableSet(relationships); - - final List properties = new ArrayList<>(); - properties.add(FORMAT); - properties.add(BYTE_SEQUENCE); - properties.add(KEEP_SEQUENCE); - properties.add(BYTE_SEQUENCE_LOCATION); - this.properties = Collections.unmodifiableList(properties); - } - @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitJson.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitJson.java index 3b788fd41e..d49e705c16 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitJson.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitJson.java @@ -42,16 
+42,12 @@ import org.apache.nifi.logging.ComponentLog; import org.apache.nifi.processor.DataUnit; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.util.StandardValidators; import java.nio.charset.StandardCharsets; -import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -93,6 +89,12 @@ public class SplitJson extends AbstractJsonPathProcessor { .required(true) .build(); + private static final List PROPERTIES = List.of( + ARRAY_JSON_PATH_EXPRESSION, + NULL_VALUE_DEFAULT_REPRESENTATION, + MAX_STRING_LENGTH + ); + public static final Relationship REL_ORIGINAL = new Relationship.Builder() .name("original") .description("The original FlowFile that was split into segments. 
If the FlowFile fails processing, nothing will be sent to " @@ -108,36 +110,24 @@ public class SplitJson extends AbstractJsonPathProcessor { + "path does not exist), it will be routed to this relationship") .build(); - private List properties; - private Set relationships; + private static final Set RELATIONSHIPS = Set.of( + REL_ORIGINAL, + REL_SPLIT, + REL_FAILURE + ); private final AtomicReference JSON_PATH_REF = new AtomicReference<>(); private volatile String nullDefaultValue; private volatile Configuration jsonPathConfiguration; - @Override - protected void init(final ProcessorInitializationContext context) { - final List properties = new ArrayList<>(); - properties.add(ARRAY_JSON_PATH_EXPRESSION); - properties.add(NULL_VALUE_DEFAULT_REPRESENTATION); - properties.add(MAX_STRING_LENGTH); - this.properties = Collections.unmodifiableList(properties); - - final Set relationships = new HashSet<>(); - relationships.add(REL_ORIGINAL); - relationships.add(REL_SPLIT); - relationships.add(REL_FAILURE); - this.relationships = Collections.unmodifiableSet(relationships); - } - @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @Override @@ -165,7 +155,7 @@ public class SplitJson extends AbstractJsonPathProcessor { }; String value = validationContext.getProperty(ARRAY_JSON_PATH_EXPRESSION).getValue(); - return Collections.singleton(validator.validate(ARRAY_JSON_PATH_EXPRESSION.getName(), value, validationContext)); + return Set.of(validator.validate(ARRAY_JSON_PATH_EXPRESSION.getName(), value, validationContext)); } @OnScheduled diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitRecord.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitRecord.java index 
0b441c08d1..451333f836 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitRecord.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitRecord.java @@ -54,7 +54,6 @@ import java.io.InputStream; import java.io.OutputStream; import java.util.ArrayList; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -100,6 +99,12 @@ public class SplitRecord extends AbstractProcessor { .required(true) .build(); + private static final List PROPERTIES = List.of( + RECORD_READER, + RECORD_WRITER, + RECORDS_PER_SPLIT + ); + static final Relationship REL_SPLITS = new Relationship.Builder() .name("splits") .description("The individual 'segments' of the original FlowFile will be routed to this relationship.") @@ -114,22 +119,20 @@ public class SplitRecord extends AbstractProcessor { + "the unchanged FlowFile will be routed to this relationship.") .build(); + private static final Set RELATIONSHIPS = Set.of( + REL_SPLITS, + REL_ORIGINAL, + REL_FAILURE + ); + @Override protected List getSupportedPropertyDescriptors() { - final List properties = new ArrayList<>(); - properties.add(RECORD_READER); - properties.add(RECORD_WRITER); - properties.add(RECORDS_PER_SPLIT); - return properties; + return PROPERTIES; } @Override public Set getRelationships() { - final Set relationships = new HashSet<>(); - relationships.add(REL_SPLITS); - relationships.add(REL_ORIGINAL); - relationships.add(REL_FAILURE); - return relationships; + return RELATIONSHIPS; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitText.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitText.java index f319b6d759..5936f129dd 
100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitText.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitText.java @@ -52,11 +52,8 @@ import java.io.InputStream; import java.io.OutputStream; import java.nio.charset.StandardCharsets; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.ListIterator; @@ -141,6 +138,14 @@ public class SplitText extends AbstractProcessor { .defaultValue("true") .build(); + private static final List PROPERTIES = List.of( + LINE_SPLIT_COUNT, + FRAGMENT_MAX_SIZE, + HEADER_LINE_COUNT, + HEADER_MARKER, + REMOVE_TRAILING_NEWLINES + ); + public static final Relationship REL_ORIGINAL = new Relationship.Builder() .name("original") .description("The original input file will be routed to this destination when it has been successfully split into 1 or more files") @@ -154,22 +159,11 @@ public class SplitText extends AbstractProcessor { .description("If a file cannot be split for some reason, the original file will be routed to this destination and nothing will be routed elsewhere") .build(); - private static final List properties; - private static final Set relationships; - - static { - properties = Collections.unmodifiableList(Arrays.asList( - LINE_SPLIT_COUNT, - FRAGMENT_MAX_SIZE, - HEADER_LINE_COUNT, - HEADER_MARKER, - REMOVE_TRAILING_NEWLINES)); - - relationships = Collections.unmodifiableSet(new HashSet<>(Arrays.asList( + private static final Set RELATIONSHIPS = Set.of( REL_ORIGINAL, REL_SPLITS, - REL_FAILURE))); - } + REL_FAILURE + ); private volatile boolean removeTrailingNewLines; @@ -183,7 +177,7 @@ public class SplitText extends AbstractProcessor { @Override public Set getRelationships() { - 
return relationships; + return RELATIONSHIPS; } @OnScheduled @@ -280,7 +274,7 @@ public class SplitText extends AbstractProcessor { @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } /** @@ -300,7 +294,7 @@ public class SplitText extends AbstractProcessor { } int fragmentIndex = 1; // set to 1 to preserve the existing behavior *only*. Perhaps should be deprecated to follow the 0,1,2... scheme - if ((computedSplitsInfo.size() == 0) && (headerFlowFile != null)) { + if (computedSplitsInfo.isEmpty() && headerFlowFile != null) { FlowFile splitFlowFile = processSession.clone(sourceFlowFile, 0, headerFlowFile.getSize() - headerCrlfLength); splitFlowFile = this.updateAttributes(processSession, splitFlowFile, 0, splitFlowFile.getSize(), fragmentId, fragmentIndex++, sourceFlowFile.getAttribute(CoreAttributes.FILENAME.key())); @@ -390,7 +384,7 @@ public class SplitText extends AbstractProcessor { /** * Will generate {@link SplitInfo} for the next fragment that represents the * header of the future split. - * + *

* If split size is controlled by the amount of lines in the split then the * resulting {@link SplitInfo} line count will always be <= 'splitMaxLineCount'. It can only be less IF it reaches the EOF. * If split size is controlled by the {@link #maxSplitSize}, then the resulting {@link SplitInfo} line count @@ -434,7 +428,7 @@ public class SplitText extends AbstractProcessor { /** * Will generate {@link SplitInfo} for the next split. - * + *

* If split size is controlled by the amount of lines in the split then the resulting * {@link SplitInfo} line count will always be <= 'splitMaxLineCount'. * If split size is controlled by the {@link #maxSplitSize}, then the resulting {@link SplitInfo} diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitXml.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitXml.java index e3b1804a60..073a713e36 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitXml.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitXml.java @@ -16,33 +16,13 @@ */ package org.apache.nifi.processors.standard; -import static org.apache.nifi.flowfile.attributes.FragmentAttributes.FRAGMENT_COUNT; -import static org.apache.nifi.flowfile.attributes.FragmentAttributes.FRAGMENT_ID; -import static org.apache.nifi.flowfile.attributes.FragmentAttributes.FRAGMENT_INDEX; -import static org.apache.nifi.flowfile.attributes.FragmentAttributes.SEGMENT_ORIGINAL_FILENAME; -import static org.apache.nifi.flowfile.attributes.FragmentAttributes.copyAttributesToOriginal; - -import java.io.InputStream; -import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; -import java.util.TreeMap; -import java.util.UUID; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicInteger; import org.apache.commons.text.StringEscapeUtils; -import org.apache.nifi.annotation.behavior.SystemResourceConsideration; import org.apache.nifi.annotation.behavior.InputRequirement; import 
org.apache.nifi.annotation.behavior.InputRequirement.Requirement; import org.apache.nifi.annotation.behavior.SideEffectFree; import org.apache.nifi.annotation.behavior.SupportsBatching; import org.apache.nifi.annotation.behavior.SystemResource; +import org.apache.nifi.annotation.behavior.SystemResourceConsideration; import org.apache.nifi.annotation.behavior.WritesAttribute; import org.apache.nifi.annotation.behavior.WritesAttributes; import org.apache.nifi.annotation.documentation.CapabilityDescription; @@ -54,7 +34,6 @@ import org.apache.nifi.logging.ComponentLog; import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.util.StandardValidators; import org.apache.nifi.processors.standard.util.XmlElementNotifier; @@ -66,6 +45,26 @@ import org.xml.sax.InputSource; import org.xml.sax.Locator; import org.xml.sax.SAXException; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.TreeMap; +import java.util.UUID; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; + +import static org.apache.nifi.flowfile.attributes.FragmentAttributes.FRAGMENT_COUNT; +import static org.apache.nifi.flowfile.attributes.FragmentAttributes.FRAGMENT_ID; +import static org.apache.nifi.flowfile.attributes.FragmentAttributes.FRAGMENT_INDEX; +import static org.apache.nifi.flowfile.attributes.FragmentAttributes.SEGMENT_ORIGINAL_FILENAME; +import static org.apache.nifi.flowfile.attributes.FragmentAttributes.copyAttributesToOriginal; + @SideEffectFree @SupportsBatching @Tags({"xml", "split"}) @@ -95,6 
+94,8 @@ public class SplitXml extends AbstractProcessor { .defaultValue("1") .build(); + private static final List PROPERTIES = List.of(SPLIT_DEPTH); + public static final Relationship REL_ORIGINAL = new Relationship.Builder() .name("original") .description("The original FlowFile that was split into segments. If the FlowFile fails processing, nothing will be sent to this relationship") @@ -108,30 +109,20 @@ public class SplitXml extends AbstractProcessor { .description("If a FlowFile fails processing for any reason (for example, the FlowFile is not valid XML), it will be routed to this relationship") .build(); - private List properties; - private Set relationships; - - @Override - protected void init(final ProcessorInitializationContext context) { - final List properties = new ArrayList<>(); - properties.add(SPLIT_DEPTH); - this.properties = Collections.unmodifiableList(properties); - - final Set relationships = new HashSet<>(); - relationships.add(REL_ORIGINAL); - relationships.add(REL_SPLIT); - relationships.add(REL_FAILURE); - this.relationships = Collections.unmodifiableSet(relationships); - } + private static final Set RELATIONSHIPS = Set.of( + REL_ORIGINAL, + REL_SPLIT, + REL_FAILURE + ); @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @Override @@ -333,7 +324,7 @@ public class SplitXml extends AbstractProcessor { private String prefixToNamespace(String prefix) { final String ns; - if (prefix.length() == 0) { + if (prefix.isEmpty()) { ns = "xmlns"; } else { ns = "xmlns:" + prefix; diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TailFile.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TailFile.java index 4044c25d40..1660e06dba 100644 --- 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TailFile.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TailFile.java @@ -16,7 +16,6 @@ */ package org.apache.nifi.processors.standard; -import java.util.Map.Entry; import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.StringUtils; import org.apache.nifi.annotation.behavior.InputRequirement; @@ -78,6 +77,7 @@ import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; @@ -88,9 +88,8 @@ import java.util.zip.CRC32; import java.util.zip.CheckedInputStream; import java.util.zip.Checksum; -import static java.util.stream.Collectors.toList; -import static org.apache.nifi.expression.ExpressionLanguageScope.NONE; import static org.apache.nifi.expression.ExpressionLanguageScope.ENVIRONMENT; +import static org.apache.nifi.expression.ExpressionLanguageScope.NONE; import static org.apache.nifi.processor.util.StandardValidators.DATA_SIZE_VALIDATOR; import static org.apache.nifi.processor.util.StandardValidators.REGULAR_EXPRESSION_VALIDATOR; @@ -296,44 +295,48 @@ public class TailFile extends AbstractProcessor { .defaultValue("65536 B") .build(); + private static final List PROPERTIES = List.of( + MODE, + FILENAME, + ROLLING_FILENAME_PATTERN, + POST_ROLLOVER_TAIL_PERIOD, + BASE_DIRECTORY, + START_POSITION, + STATE_LOCATION, + RECURSIVE, + LOOKUP_FREQUENCY, + MAXIMUM_AGE, + REREAD_ON_NUL, + LINE_START_PATTERN, + PRE_ALLOCATED_BUFFER_SIZE, + MAX_BUFFER_LENGTH + ); + static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") .description("All FlowFiles are routed to this Relationship.") .build(); + private static final Set RELATIONSHIPS = Set.of(REL_SUCCESS); + private 
volatile Map states = new HashMap<>(); - private volatile AtomicLong lastLookup = new AtomicLong(0L); - private volatile AtomicBoolean isMultiChanging = new AtomicBoolean(false); + private final AtomicLong lastLookup = new AtomicLong(0L); + private final AtomicBoolean isMultiChanging = new AtomicBoolean(false); private volatile boolean requireStateLookup = true; - private volatile ByteArrayOutputStream linesBuffer = new ByteArrayOutputStream(); + private final ByteArrayOutputStream linesBuffer = new ByteArrayOutputStream(); private volatile Pattern lineStartPattern; private volatile long maxBufferBytes; private volatile int preAllocatedBufferSize; @Override protected List getSupportedPropertyDescriptors() { - final List properties = new ArrayList<>(); - properties.add(MODE); - properties.add(FILENAME); - properties.add(ROLLING_FILENAME_PATTERN); - properties.add(POST_ROLLOVER_TAIL_PERIOD); - properties.add(BASE_DIRECTORY); - properties.add(START_POSITION); - properties.add(STATE_LOCATION); - properties.add(RECURSIVE); - properties.add(LOOKUP_FREQUENCY); - properties.add(MAXIMUM_AGE); - properties.add(REREAD_ON_NUL); - properties.add(LINE_START_PATTERN); - properties.add(PRE_ALLOCATED_BUFFER_SIZE); - properties.add(MAX_BUFFER_LENGTH); - return properties; + return PROPERTIES; } @Override public Set getRelationships() { - return Collections.singleton(REL_SUCCESS); + return RELATIONSHIPS; } @Override @@ -414,7 +417,7 @@ public class TailFile extends AbstractProcessor { final String startPosition = context.getProperty(START_POSITION).getValue(); - if (!stateMap.getStateVersion().isPresent() || stateMap.toMap().isEmpty()) { + if (stateMap.getStateVersion().isEmpty() || stateMap.toMap().isEmpty()) { //state has been cleared or never stored so recover as 'empty state' initStates(filesToTail, Collections.emptyMap(), true, startPosition); recoverState(context, filesToTail, Collections.emptyMap()); @@ -424,7 +427,7 @@ public class TailFile extends AbstractProcessor { Map 
statesMap = stateMap.toMap(); if (statesMap.containsKey(TailFileState.StateKeys.FILENAME) - && !statesMap.keySet().stream().anyMatch(key -> key.startsWith(MAP_PREFIX))) { + && statesMap.keySet().stream().noneMatch(key -> key.startsWith(MAP_PREFIX))) { // If statesMap contains "filename" key without "file.0." prefix, // and there's no key with "file." prefix, then // it indicates that the statesMap is created with earlier version of NiFi. @@ -437,7 +440,7 @@ public class TailFile extends AbstractProcessor { // LENGTH is added from NiFi 1.1.0. Set the value with using the last position so that we can use existing state // to avoid sending duplicated log data after updating NiFi. migratedStatesMap.put(MAP_PREFIX + "0." + TailFileState.StateKeys.LENGTH, statesMap.get(TailFileState.StateKeys.POSITION)); - statesMap = Collections.unmodifiableMap(migratedStatesMap); + statesMap = Map.copyOf(migratedStatesMap); getLogger().info("statesMap has been migrated. {}", migratedStatesMap); } @@ -466,7 +469,7 @@ public class TailFile extends AbstractProcessor { } // first, we remove the files that are no longer present - final List toBeRemoved = new ArrayList(); + final List toBeRemoved = new ArrayList<>(); for (String file : states.keySet()) { if (!filesToTail.contains(file)) { toBeRemoved.add(file); @@ -738,7 +741,6 @@ public class TailFile extends AbstractProcessor { } catch (IOException e) { getLogger().error("Exception raised while attempting to cleanup session's state map", e); context.yield(); - return; } } @@ -746,9 +748,9 @@ public class TailFile extends AbstractProcessor { List keysToRemove = new ArrayList<>(); List filesToRemove = sessionStates.entrySet().stream() .filter(entry -> entry.getKey().endsWith("filename") - && !states.keySet().contains(entry.getValue())) + && !states.containsKey(entry.getValue())) .map(Entry::getKey) - .collect(toList()); + .toList(); for (String key : filesToRemove) { final String prefix = StringUtils.substringBefore(key, "filename"); @@ 
-949,12 +951,11 @@ public class TailFile extends AbstractProcessor { flowFileName = baseName + "." + position + "-" + positionHolder.get(); } - final Map attributes = new HashMap<>(3); - attributes.put(CoreAttributes.FILENAME.key(), flowFileName); - attributes.put(CoreAttributes.MIME_TYPE.key(), "text/plain"); - attributes.put("tailfile.original.path", tailFile); - flowFile = session.putAllAttributes(flowFile, attributes); - + flowFile = session.putAllAttributes(flowFile, Map.of( + CoreAttributes.FILENAME.key(), flowFileName, + CoreAttributes.MIME_TYPE.key(), "text/plain", + "tailfile.original.path", tailFile + )); session.getProvenanceReporter().receive(flowFile, file.toURI().toString(), "FlowFile contains bytes " + position + " through " + positionHolder.get() + " of source file", TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos)); session.transfer(flowFile, REL_SUCCESS); @@ -1195,7 +1196,7 @@ public class TailFile extends AbstractProcessor { // Sort files based on last modified timestamp. 
If same timestamp, use filename as a secondary sort, as often // files that are rolled over are given a naming scheme that is lexicographically sort in the same order as the // timestamp, such as yyyy-MM-dd-HH-mm-ss - rolledOffFiles.sort(new Comparator() { + rolledOffFiles.sort(new Comparator<>() { @Override public int compare(final File o1, final File o2) { final int lastModifiedComp = Long.compare(o1.lastModified(), o2.lastModified()); @@ -1274,7 +1275,7 @@ public class TailFile extends AbstractProcessor { try { reader.close(); getLogger().debug("Closed FileChannel {}", reader); - } catch (final IOException ioe2) { + } catch (final IOException ignored) { } return null; @@ -1362,7 +1363,7 @@ public class TailFile extends AbstractProcessor { final boolean tailFirstFile; if (rolloverOccurred) { - final File firstFile = rolledOffFiles.get(0); + final File firstFile = rolledOffFiles.getFirst(); final long millisSinceModified = getCurrentTimeMs() - firstFile.lastModified(); final boolean fileGrew = firstFile.length() >= position; final boolean tailRolledFile = postRolloverTailMillis == 0 || millisSinceModified < postRolloverTailMillis; @@ -1372,7 +1373,7 @@ public class TailFile extends AbstractProcessor { } if (tailFirstFile) { - final File firstFile = rolledOffFiles.get(0); + final File firstFile = rolledOffFiles.getFirst(); final boolean consumed; if (shouldTailPostRollover) { @@ -1387,14 +1388,14 @@ public class TailFile extends AbstractProcessor { } if (consumed) { - rolledOffFiles.remove(0); + rolledOffFiles.removeFirst(); } } else if (tailingPostRollover && shouldTailPostRollover) { // This condition is encountered when we are tailing a file post-rollover, and we've now reached the point where the rolled file // has not changed. 
final List allRolledFiles = getRolledOffFiles(context, 0L, tailFile); allRolledFiles.sort(Comparator.comparing(File::lastModified).reversed()); - final File newestFile = allRolledFiles.get(0); + final File newestFile = allRolledFiles.getFirst(); // If we don't notice that the file has been modified, per the checks above, then we want to keep checking until the last modified // date has eclipsed the configured value for the Post-Rollover Tail Period. Until then, return false. Once that occurs, we will @@ -1480,11 +1481,11 @@ public class TailFile extends AbstractProcessor { if (flowFile.getSize() == 0L) { session.remove(flowFile); } else { - final Map attributes = new HashMap<>(3); - attributes.put(CoreAttributes.FILENAME.key(), fileToTail.getName()); - attributes.put(CoreAttributes.MIME_TYPE.key(), "text/plain"); - attributes.put("tailfile.original.path", tailFile); - flowFile = session.putAllAttributes(flowFile, attributes); + flowFile = session.putAllAttributes(flowFile, Map.of( + CoreAttributes.FILENAME.key(), fileToTail.getName(), + CoreAttributes.MIME_TYPE.key(), "text/plain", + "tailfile.original.path", tailFile + )); session.getProvenanceReporter().receive(flowFile, fileToTail.toURI().toString(), "FlowFile contains bytes 0 through " + position + " of source file", TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos)); @@ -1552,11 +1553,11 @@ public class TailFile extends AbstractProcessor { if (flowFile.getSize() == 0L) { session.remove(flowFile); } else { - final Map attributes = new HashMap<>(3); - attributes.put(CoreAttributes.FILENAME.key(), file.getName()); - attributes.put(CoreAttributes.MIME_TYPE.key(), "text/plain"); - attributes.put("tailfile.original.path", tfo.getState().getFilename()); - flowFile = session.putAllAttributes(flowFile, attributes); + flowFile = session.putAllAttributes(flowFile, Map.of( + CoreAttributes.FILENAME.key(), file.getName(), + CoreAttributes.MIME_TYPE.key(), "text/plain", + "tailfile.original.path", 
tfo.getState().getFilename()) + ); session.getProvenanceReporter().receive(flowFile, file.toURI().toString()); session.transfer(flowFile, REL_SUCCESS); getLogger().debug("Created {} from {} and routed to success", flowFile, file); @@ -1580,7 +1581,7 @@ public class TailFile extends AbstractProcessor { private TailFileState state; private Long expectedRecoveryChecksum; - private int filenameIndex; + private final int filenameIndex; private boolean tailFileChanged = true; public TailFileObject(final int index, final TailFileState fileState) { diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TransformXml.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TransformXml.java index f16cee592e..f3f33c8e4a 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TransformXml.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/TransformXml.java @@ -44,7 +44,6 @@ import org.apache.nifi.lookup.StringLookupService; import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; @@ -75,7 +74,6 @@ import java.io.StringReader; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; @@ -161,6 +159,16 @@ public class TransformXml extends AbstractProcessor { .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR) .build(); + private static 
final List PROPERTIES = List.of( + XSLT_FILE_NAME, + XSLT_CONTROLLER, + XSLT_CONTROLLER_KEY, + INDENT_OUTPUT, + SECURE_PROCESSING, + CACHE_SIZE, + CACHE_TTL_AFTER_LAST_ACCESS + ); + public static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") .description("The FlowFile with transformed content will be routed to this relationship") @@ -171,36 +179,21 @@ public class TransformXml extends AbstractProcessor { .description("If a FlowFile fails processing for any reason (for example, the FlowFile is not valid XML), it will be routed to this relationship") .build(); - private List properties; - private Set relationships; + private static final Set RELATIONSHIPS = Set.of( + REL_SUCCESS, + REL_FAILURE + ); + private LoadingCache cache; - @Override - protected void init(final ProcessorInitializationContext context) { - final List properties = new ArrayList<>(); - properties.add(XSLT_FILE_NAME); - properties.add(XSLT_CONTROLLER); - properties.add(XSLT_CONTROLLER_KEY); - properties.add(INDENT_OUTPUT); - properties.add(SECURE_PROCESSING); - properties.add(CACHE_SIZE); - properties.add(CACHE_TTL_AFTER_LAST_ACCESS); - this.properties = Collections.unmodifiableList(properties); - - final Set relationships = new HashSet<>(); - relationships.add(REL_SUCCESS); - relationships.add(REL_FAILURE); - this.relationships = Collections.unmodifiableSet(relationships); - } - @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UnpackContent.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UnpackContent.java index ab58c8cdd7..492c488114 100644 --- 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UnpackContent.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UnpackContent.java @@ -16,7 +16,6 @@ */ package org.apache.nifi.processors.standard; -import java.nio.charset.Charset; import net.lingala.zip4j.io.inputstream.ZipInputStream; import net.lingala.zip4j.model.LocalFileHeader; import net.lingala.zip4j.model.enums.EncryptionMethod; @@ -66,11 +65,11 @@ import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.nio.charset.Charset; import java.nio.file.Path; import java.time.ZoneId; import java.time.format.DateTimeFormatter; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -196,6 +195,14 @@ public class UnpackContent extends AbstractProcessor { .addValidator(StandardValidators.BOOLEAN_VALIDATOR) .build(); + private static final List PROPERTIES = List.of( + PACKAGING_FORMAT, + ZIP_FILENAME_CHARSET, + FILE_FILTER, + PASSWORD, + ALLOW_STORED_ENTRIES_WITH_DATA_DESCRIPTOR + ); + public static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") .description("Unpacked FlowFiles are sent to this relationship") @@ -209,18 +216,10 @@ public class UnpackContent extends AbstractProcessor { .description("The original FlowFile is sent to this relationship when it cannot be unpacked for some reason") .build(); - private static final Set relationships = Set.of( - REL_SUCCESS, - REL_FAILURE, - REL_ORIGINAL - ); - - private static final List properties = List.of( - PACKAGING_FORMAT, - ZIP_FILENAME_CHARSET, - FILE_FILTER, - PASSWORD, - ALLOW_STORED_ENTRIES_WITH_DATA_DESCRIPTOR + private static final Set RELATIONSHIPS = Set.of( + REL_SUCCESS, + REL_FAILURE, + REL_ORIGINAL ); private Pattern fileFilter; @@ -228,15 +227,14 @@ 
public class UnpackContent extends AbstractProcessor { private Unpacker tarUnpacker; private Unpacker zipUnpacker; - @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @OnStopped @@ -389,23 +387,23 @@ public class UnpackContent extends AbstractProcessor { FlowFile unpackedFile = session.create(source); try { - final Map attributes = new HashMap<>(); - attributes.put(CoreAttributes.FILENAME.key(), file.getName()); - attributes.put(CoreAttributes.PATH.key(), filePathString); - attributes.put(CoreAttributes.MIME_TYPE.key(), OCTET_STREAM); - - attributes.put(FILE_PERMISSIONS_ATTRIBUTE, FileInfo.permissionToString(tarEntry.getMode())); - attributes.put(FILE_OWNER_ATTRIBUTE, String.valueOf(tarEntry.getUserName())); - attributes.put(FILE_GROUP_ATTRIBUTE, String.valueOf(tarEntry.getGroupName())); - final String timeAsString = DATE_TIME_FORMATTER.format(tarEntry.getModTime().toInstant()); - attributes.put(FILE_LAST_MODIFIED_TIME_ATTRIBUTE, timeAsString); - attributes.put(FILE_CREATION_TIME_ATTRIBUTE, timeAsString); - attributes.put(FRAGMENT_ID, fragmentId); - attributes.put(FRAGMENT_INDEX, String.valueOf(++fragmentCount)); + unpackedFile = session.putAllAttributes(unpackedFile, Map.of( + CoreAttributes.FILENAME.key(), file.getName(), + CoreAttributes.PATH.key(), filePathString, + CoreAttributes.MIME_TYPE.key(), OCTET_STREAM, - unpackedFile = session.putAllAttributes(unpackedFile, attributes); + FILE_PERMISSIONS_ATTRIBUTE, FileInfo.permissionToString(tarEntry.getMode()), + FILE_OWNER_ATTRIBUTE, String.valueOf(tarEntry.getUserName()), + FILE_GROUP_ATTRIBUTE, String.valueOf(tarEntry.getGroupName()), + + FILE_LAST_MODIFIED_TIME_ATTRIBUTE, timeAsString, + FILE_CREATION_TIME_ATTRIBUTE, timeAsString, + + FRAGMENT_ID, fragmentId, + FRAGMENT_INDEX, String.valueOf(++fragmentCount) + )); final long fileSize = tarEntry.getSize(); unpackedFile 
= session.write(unpackedFile, outputStream -> StreamUtils.copy(tarIn, outputStream, fileSize)); @@ -479,16 +477,15 @@ public class UnpackContent extends AbstractProcessor { FlowFile unpackedFile = session.create(sourceFlowFile); try { - final Map attributes = new HashMap<>(); - attributes.put(CoreAttributes.FILENAME.key(), file.getName()); - attributes.put(CoreAttributes.PATH.key(), parentDirectory); - attributes.put(CoreAttributes.MIME_TYPE.key(), OCTET_STREAM); - attributes.put(FILE_ENCRYPTION_METHOD_ATTRIBUTE, encryptionMethod.toString()); + unpackedFile = session.putAllAttributes(unpackedFile, Map.of( + CoreAttributes.FILENAME.key(), file.getName(), + CoreAttributes.PATH.key(), parentDirectory, + CoreAttributes.MIME_TYPE.key(), OCTET_STREAM, + FILE_ENCRYPTION_METHOD_ATTRIBUTE, encryptionMethod.toString(), - attributes.put(FRAGMENT_ID, fragmentId); - attributes.put(FRAGMENT_INDEX, String.valueOf(++fragmentIndex)); - - unpackedFile = session.putAllAttributes(unpackedFile, attributes); + FRAGMENT_ID, fragmentId, + FRAGMENT_INDEX, String.valueOf(++fragmentIndex) + )); unpackedFile = session.write(unpackedFile, outputStream -> StreamUtils.copy(zipInputStream, outputStream)); } finally { unpacked.add(unpackedFile); @@ -627,10 +624,10 @@ public class UnpackContent extends AbstractProcessor { ArrayList newList = new ArrayList<>(unpacked); unpacked.clear(); for (FlowFile ff : newList) { - final Map attributes = new HashMap<>(); - attributes.put(FRAGMENT_COUNT, String.valueOf(fragmentCount)); - attributes.put(SEGMENT_ORIGINAL_FILENAME, originalFilename); - FlowFile newFF = session.putAllAttributes(ff, attributes); + FlowFile newFF = session.putAllAttributes(ff, Map.of( + FRAGMENT_COUNT, String.valueOf(fragmentCount), + SEGMENT_ORIGINAL_FILENAME, originalFilename + )); unpacked.add(newFF); } } diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UpdateCounter.java 
b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UpdateCounter.java index 7fe3ab6711..d1e39714ad 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UpdateCounter.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UpdateCounter.java @@ -23,19 +23,15 @@ import org.apache.nifi.annotation.documentation.Tags; import org.apache.nifi.components.PropertyDescriptor; import org.apache.nifi.expression.ExpressionLanguageScope; import org.apache.nifi.flowfile.FlowFile; -import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.ProcessorInitializationContext; +import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; -import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.Set; -import java.util.HashSet; @InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED) @Tags({"counter", "debug", "instrumentation"}) @@ -43,8 +39,6 @@ import java.util.HashSet; @ReadsAttribute(attribute = "counterName", description = "The name of the counter to update/get.") public class UpdateCounter extends AbstractProcessor { - - static final PropertyDescriptor COUNTER_NAME = new PropertyDescriptor.Builder() .name("counter-name") .displayName("Counter Name") @@ -66,35 +60,26 @@ public class UpdateCounter extends AbstractProcessor { .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) .build(); + private static final List PROPERTIES = List.of( + COUNTER_NAME, + DELTA + ); + static final Relationship SUCCESS = new 
Relationship.Builder() .name("success") .description("Counter was updated/retrieved") .build(); - private List descriptors; - - private Set relationships; - - @Override - protected void init(final ProcessorInitializationContext context) { - final List descriptors = new ArrayList<>(); - descriptors.add(COUNTER_NAME); - descriptors.add(DELTA); - this.descriptors = Collections.unmodifiableList(descriptors); - - final Set relationships = new HashSet<>(); - relationships.add(SUCCESS); - this.relationships = Collections.unmodifiableSet(relationships); - } + private static final Set RELATIONSHIPS = Set.of(SUCCESS); @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override protected List getSupportedPropertyDescriptors() { - return descriptors; + return PROPERTIES; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UpdateDatabaseTable.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UpdateDatabaseTable.java index 7925327073..239ebede08 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UpdateDatabaseTable.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UpdateDatabaseTable.java @@ -65,7 +65,6 @@ import java.sql.Statement; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -233,7 +232,9 @@ public class UpdateDatabaseTable extends AbstractProcessor { .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) .build(); + protected static final Map dbAdapters; static final PropertyDescriptor DB_TYPE; + private static final List properties; // Relationships 
public static final Relationship REL_SUCCESS = new Relationship.Builder() @@ -246,9 +247,10 @@ public class UpdateDatabaseTable extends AbstractProcessor { .description("A FlowFile containing records routed to this relationship if the record could not be transmitted to the database.") .build(); - protected static final Map dbAdapters; - private static final List propertyDescriptors; - protected static Set relationships; + protected static Set relationships = Set.of( + REL_SUCCESS, + REL_FAILURE + ); static { dbAdapters = new HashMap<>(); @@ -270,32 +272,27 @@ public class UpdateDatabaseTable extends AbstractProcessor { .required(false) .build(); - final Set r = new HashSet<>(); - r.add(REL_SUCCESS); - r.add(REL_FAILURE); - relationships = Collections.unmodifiableSet(r); - - final List pds = new ArrayList<>(); - pds.add(RECORD_READER); - pds.add(DBCP_SERVICE); - pds.add(DB_TYPE); - pds.add(CATALOG_NAME); - pds.add(SCHEMA_NAME); - pds.add(TABLE_NAME); - pds.add(CREATE_TABLE); - pds.add(PRIMARY_KEY_FIELDS); - pds.add(TRANSLATE_FIELD_NAMES); - pds.add(UPDATE_FIELD_NAMES); - pds.add(RECORD_WRITER_FACTORY); - pds.add(QUOTE_TABLE_IDENTIFIER); - pds.add(QUOTE_COLUMN_IDENTIFIERS); - pds.add(QUERY_TIMEOUT); - propertyDescriptors = Collections.unmodifiableList(pds); + properties = List.of( + RECORD_READER, + DBCP_SERVICE, + DB_TYPE, + CATALOG_NAME, + SCHEMA_NAME, + TABLE_NAME, + CREATE_TABLE, + PRIMARY_KEY_FIELDS, + TRANSLATE_FIELD_NAMES, + UPDATE_FIELD_NAMES, + RECORD_WRITER_FACTORY, + QUOTE_TABLE_IDENTIFIER, + QUOTE_COLUMN_IDENTIFIERS, + QUERY_TIMEOUT + ); } @Override protected List getSupportedPropertyDescriptors() { - return propertyDescriptors; + return properties; } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UpdateRecord.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UpdateRecord.java index 
42a020b273..85046b4922 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UpdateRecord.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/UpdateRecord.java @@ -57,6 +57,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -196,7 +197,7 @@ public class UpdateRecord extends AbstractRecordProcessor { return Collections.emptyList(); } - return Collections.singleton(new ValidationResult.Builder() + return Set.of(new ValidationResult.Builder() .subject("User-defined Properties") .valid(false) .explanation("At least one RecordPath must be specified") @@ -272,14 +273,14 @@ public class UpdateRecord extends AbstractRecordProcessor { } private boolean isReplacingRoot(final List destinationFields) { - return destinationFields.size() == 1 && !destinationFields.get(0).getParentRecord().isPresent(); + return destinationFields.size() == 1 && destinationFields.getFirst().getParentRecord().isEmpty(); } private Record processRelativePath(final RecordPath replacementRecordPath, final Stream destinationFields, Record record) { final List destinationFieldValues = destinationFields.collect(Collectors.toList()); if (isReplacingRoot(destinationFieldValues)) { - final List selectedFields = getSelectedFields(replacementRecordPath, destinationFieldValues.get(0), record); + final List selectedFields = getSelectedFields(replacementRecordPath, destinationFieldValues.getFirst(), record); record = updateRecord(destinationFieldValues, selectedFields, record); } else { for (final FieldValue fieldVal : destinationFieldValues) { @@ -326,8 +327,7 @@ public class UpdateRecord extends AbstractRecordProcessor { } private void updateFieldValue(final FieldValue fieldValue, final Object replacement) { - if 
(replacement instanceof FieldValue) { - final FieldValue replacementFieldValue = (FieldValue) replacement; + if (replacement instanceof FieldValue replacementFieldValue) { fieldValue.updateValue(replacementFieldValue.getValue(), replacementFieldValue.getField().getDataType()); } else { fieldValue.updateValue(replacement); @@ -354,7 +354,7 @@ public class UpdateRecord extends AbstractRecordProcessor { if (selectedFields.isEmpty()) { return null; } else { - return selectedFields.get(0); + return selectedFields.getFirst(); } } } diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateCsv.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateCsv.java index 35294ffc04..68cee0515a 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateCsv.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateCsv.java @@ -16,20 +16,6 @@ */ package org.apache.nifi.processors.standard; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.OutputStream; -import java.io.Reader; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Set; -import java.util.concurrent.atomic.AtomicReference; - import org.apache.nifi.annotation.behavior.InputRequirement; import org.apache.nifi.annotation.behavior.InputRequirement.Requirement; import org.apache.nifi.annotation.behavior.SideEffectFree; @@ -49,7 +35,6 @@ import org.apache.nifi.logging.ComponentLog; import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.ProcessContext; import 
org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.io.InputStreamCallback; import org.apache.nifi.processor.io.OutputStreamCallback; @@ -82,6 +67,17 @@ import org.supercsv.io.CsvListReader; import org.supercsv.prefs.CsvPreference; import org.supercsv.util.CsvContext; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.OutputStream; +import java.io.Reader; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Set; +import java.util.concurrent.atomic.AtomicReference; + @SideEffectFree @SupportsBatching @InputRequirement(Requirement.INPUT_REQUIRED) @@ -96,19 +92,20 @@ import org.supercsv.util.CsvContext; }) public class ValidateCsv extends AbstractProcessor { - private final static List allowedOperators = Arrays.asList("ParseBigDecimal", "ParseBool", "ParseChar", "ParseDate", - "ParseDouble", "ParseInt", "ParseLong", "Optional", "DMinMax", "Equals", "ForbidSubStr", "LMinMax", "NotNull", "Null", - "RequireHashCode", "RequireSubStr", "Strlen", "StrMinMax", "StrNotNullOrEmpty", "StrRegEx", "Unique", - "UniqueHashCode", "IsIncludedIn"); + private final static List ALLOWED_OPERATORS = List.of( + "ParseBigDecimal", "ParseBool", "ParseChar", "ParseDate", "ParseDouble", "ParseInt", "ParseLong", + "Optional", "DMinMax", "Equals", "ForbidSubStr", "LMinMax", "NotNull", "Null", "RequireHashCode", "RequireSubStr", + "Strlen", "StrMinMax", "StrNotNullOrEmpty", "StrRegEx", "Unique", "UniqueHashCode", "IsIncludedIn" + ); - private static final String routeWholeFlowFile = "FlowFile validation"; - private static final String routeLinesIndividually = "Line by line validation"; + private static final String ROUTE_WHOLE_FLOW_FILE = "FlowFile validation"; + private static final String ROUTE_LINES_INDIVIDUALLY = "Line by line validation"; - public static 
final AllowableValue VALIDATE_WHOLE_FLOWFILE = new AllowableValue(routeWholeFlowFile, routeWholeFlowFile, + public static final AllowableValue VALIDATE_WHOLE_FLOWFILE = new AllowableValue(ROUTE_WHOLE_FLOW_FILE, ROUTE_WHOLE_FLOW_FILE, "As soon as an error is found in the CSV file, the validation will stop and the whole flow file will be routed to the 'invalid'" + " relationship. This option offers best performances."); - public static final AllowableValue VALIDATE_LINES_INDIVIDUALLY = new AllowableValue(routeLinesIndividually, routeLinesIndividually, + public static final AllowableValue VALIDATE_LINES_INDIVIDUALLY = new AllowableValue(ROUTE_LINES_INDIVIDUALLY, ROUTE_LINES_INDIVIDUALLY, "In case an error is found, the input CSV file will be split into two FlowFiles: one routed to the 'valid' " + "relationship containing all the correct lines and one routed to the 'invalid' relationship containing all " + "the incorrect lines. Take care if choosing this option while using Unique cell processors in schema definition:" @@ -119,7 +116,7 @@ public class ValidateCsv extends AbstractProcessor { .displayName("Schema") .description("The schema to be used for validation. Is expected a comma-delimited string representing the cell " + "processors to apply. The following cell processors are allowed in the schema definition: " - + allowedOperators.toString() + ". Note: cell processors cannot be nested except with Optional.") + + ALLOWED_OPERATORS + ". 
Note: cell processors cannot be nested except with Optional.") .required(true) .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) .addValidator(StandardValidators.NON_EMPTY_EL_VALIDATOR) @@ -188,6 +185,16 @@ public class ValidateCsv extends AbstractProcessor { .defaultValue("false") .build(); + private static final List PROPERTIES = List.of( + SCHEMA, + HEADER, + DELIMITER_CHARACTER, + QUOTE_CHARACTER, + END_OF_LINE_CHARACTER, + VALIDATION_STRATEGY, + INCLUDE_ALL_VIOLATIONS + ); + public static final Relationship REL_VALID = new Relationship.Builder() .name("valid") .description("FlowFiles that are successfully validated against the schema are routed to this relationship") @@ -197,35 +204,19 @@ public class ValidateCsv extends AbstractProcessor { .description("FlowFiles that are not valid according to the specified schema are routed to this relationship") .build(); - private List properties; - private Set relationships; - - @Override - protected void init(final ProcessorInitializationContext context) { - final List properties = new ArrayList<>(); - properties.add(SCHEMA); - properties.add(HEADER); - properties.add(DELIMITER_CHARACTER); - properties.add(QUOTE_CHARACTER); - properties.add(END_OF_LINE_CHARACTER); - properties.add(VALIDATION_STRATEGY); - properties.add(INCLUDE_ALL_VIOLATIONS); - this.properties = Collections.unmodifiableList(properties); - - final Set relationships = new HashSet<>(); - relationships.add(REL_VALID); - relationships.add(REL_INVALID); - this.relationships = Collections.unmodifiableSet(relationships); - } + private static final Set RELATIONSHIPS = Set.of( + REL_VALID, + REL_INVALID + ); @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @Override @@ -236,7 +227,7 @@ public class ValidateCsv extends AbstractProcessor { String subject = SCHEMA.getName(); if 
(context.isExpressionLanguageSupported(subject) && context.isExpressionLanguagePresent(schema)) { - return Collections.singletonList(new ValidationResult.Builder().subject(subject).input(schema).explanation("Expression Language Present").valid(true).build()); + return List.of(new ValidationResult.Builder().subject(subject).input(schema).explanation("Expression Language Present").valid(true).build()); } // If no Expression Language is present, try parsing the schema try { @@ -272,15 +263,15 @@ public class ValidateCsv extends AbstractProcessor { List processorsList = new ArrayList<>(); String remaining = schema; - while (remaining.length() > 0) { + while (!remaining.isEmpty()) { remaining = setProcessor(remaining, processorsList); } - return processorsList.toArray(new CellProcessor[processorsList.size()]); + return processorsList.toArray(new CellProcessor[0]); } private String setProcessor(String remaining, List processorsList) { - StringBuffer buffer = new StringBuffer(); + StringBuilder buffer = new StringBuilder(); String inputString = remaining; int i = 0; int opening = 0; diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateJson.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateJson.java index 29a10456e9..88208ec129 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateJson.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateJson.java @@ -131,7 +131,7 @@ public class ValidateJson extends AbstractProcessor { .displayName("Schema Access Strategy") .description("Specifies how to obtain the schema that is to be used for interpreting the data.") .allowableValues(JsonSchemaStrategy.class) - 
.defaultValue(JsonSchemaStrategy.SCHEMA_CONTENT_PROPERTY.getValue()) + .defaultValue(JsonSchemaStrategy.SCHEMA_CONTENT_PROPERTY) .required(true) .build(); @@ -229,15 +229,15 @@ public class ValidateJson extends AbstractProcessor { @Override protected Collection customValidate(ValidationContext validationContext) { Collection validationResults = new ArrayList<>(); - final String schemaAccessStrategy = getSchemaAccessStrategy(validationContext); - if (isNameStrategy(validationContext) && !validationContext.getProperty(SCHEMA_REGISTRY).isSet()) { + final JsonSchemaStrategy schemaAccessStrategy = getSchemaAccessStrategy(validationContext); + if (schemaAccessStrategy.equals(JsonSchemaStrategy.SCHEMA_NAME_PROPERTY) && !validationContext.getProperty(SCHEMA_REGISTRY).isSet()) { validationResults.add(new ValidationResult.Builder() .subject(SCHEMA_REGISTRY.getDisplayName()) .explanation(getPropertyValidateMessage(schemaAccessStrategy, SCHEMA_REGISTRY)) .valid(false) .build()); - } else if (isContentStrategy(validationContext) && !validationContext.getProperty(SCHEMA_CONTENT).isSet()) { + } else if (schemaAccessStrategy.equals(JsonSchemaStrategy.SCHEMA_CONTENT_PROPERTY) && !validationContext.getProperty(SCHEMA_CONTENT).isSet()) { validationResults.add(new ValidationResult.Builder() .subject(SCHEMA_CONTENT.getDisplayName()) .explanation(getPropertyValidateMessage(schemaAccessStrategy, SCHEMA_CONTENT)) @@ -250,13 +250,15 @@ public class ValidateJson extends AbstractProcessor { @OnScheduled public void onScheduled(final ProcessContext context) throws IOException { - if (isNameStrategy(context)) { - jsonSchemaRegistry = context.getProperty(SCHEMA_REGISTRY).asControllerService(JsonSchemaRegistry.class); - } else if (isContentStrategy(context)) { - try (final InputStream inputStream = context.getProperty(SCHEMA_CONTENT).asResource().read()) { - final SchemaVersion schemaVersion = SchemaVersion.valueOf(context.getProperty(SCHEMA_VERSION).getValue()); - final JsonSchemaFactory 
factory = schemaFactories.get(schemaVersion); - schema = factory.getSchema(inputStream); + switch (getSchemaAccessStrategy(context)) { + case SCHEMA_NAME_PROPERTY -> + jsonSchemaRegistry = context.getProperty(SCHEMA_REGISTRY).asControllerService(JsonSchemaRegistry.class); + case SCHEMA_CONTENT_PROPERTY -> { + try (final InputStream inputStream = context.getProperty(SCHEMA_CONTENT).asResource().read()) { + final SchemaVersion schemaVersion = SchemaVersion.valueOf(context.getProperty(SCHEMA_VERSION).getValue()); + final JsonSchemaFactory factory = schemaFactories.get(schemaVersion); + schema = factory.getSchema(inputStream); + } } } } @@ -268,7 +270,8 @@ public class ValidateJson extends AbstractProcessor { return; } - if (isNameStrategy(context)) { + final JsonSchemaStrategy schemaAccessStrategy = getSchemaAccessStrategy(context); + if (schemaAccessStrategy.equals(JsonSchemaStrategy.SCHEMA_NAME_PROPERTY)) { try { final String schemaName = context.getProperty(SCHEMA_NAME).evaluateAttributeExpressions(flowFile).getValue(); final JsonSchema jsonSchema = jsonSchemaRegistry.retrieveSchema(schemaName); @@ -304,21 +307,11 @@ public class ValidateJson extends AbstractProcessor { } } - private String getPropertyValidateMessage(String schemaAccessStrategy, PropertyDescriptor property) { - return "The '" + schemaAccessStrategy + "' Schema Access Strategy requires that the " + property.getDisplayName() + " property be set."; + private String getPropertyValidateMessage(JsonSchemaStrategy schemaAccessStrategy, PropertyDescriptor property) { + return "The '" + schemaAccessStrategy.getValue() + "' Schema Access Strategy requires that the " + property.getDisplayName() + " property be set."; } - private boolean isNameStrategy(PropertyContext context) { - final String schemaAccessStrategy = getSchemaAccessStrategy(context); - return JsonSchemaStrategy.SCHEMA_NAME_PROPERTY.getValue().equals(schemaAccessStrategy); - } - - private String getSchemaAccessStrategy(PropertyContext context) { 
- return context.getProperty(SCHEMA_ACCESS_STRATEGY).getValue(); - } - - private boolean isContentStrategy(PropertyContext context) { - final String schemaAccessStrategy = getSchemaAccessStrategy(context); - return JsonSchemaStrategy.SCHEMA_CONTENT_PROPERTY.getValue().equals(schemaAccessStrategy); + private JsonSchemaStrategy getSchemaAccessStrategy(PropertyContext context) { + return context.getProperty(SCHEMA_ACCESS_STRATEGY).asAllowableValue(JsonSchemaStrategy.class); } } diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateRecord.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateRecord.java index cd34f1433b..26466bac09 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateRecord.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateRecord.java @@ -65,7 +65,6 @@ import org.apache.nifi.serialization.record.validation.ValidationError; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; -import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; @@ -213,6 +212,21 @@ public class ValidateRecord extends AbstractProcessor { .addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR) .build(); + private static final List PROPERTIES = List.of( + RECORD_READER, + RECORD_WRITER, + INVALID_RECORD_WRITER, + SCHEMA_ACCESS_STRATEGY, + SCHEMA_REGISTRY, + SCHEMA_NAME, + SCHEMA_TEXT, + ALLOW_EXTRA_FIELDS, + STRICT_TYPE_CHECKING, + COERCE_TYPES, + VALIDATION_DETAILS_ATTRIBUTE_NAME, + MAX_VALIDATION_DETAILS_LENGTH + ); + static final Relationship REL_VALID = new Relationship.Builder() .name("valid") .description("Records that are valid according to the schema 
will be routed to this relationship") @@ -226,32 +240,20 @@ public class ValidateRecord extends AbstractProcessor { .description("If the records cannot be read, validated, or written, for any reason, the original FlowFile will be routed to this relationship") .build(); + private static final Set RELATIONSHIPS = Set.of( + REL_VALID, + REL_INVALID, + REL_FAILURE + ); @Override protected List getSupportedPropertyDescriptors() { - final List properties = new ArrayList<>(); - properties.add(RECORD_READER); - properties.add(RECORD_WRITER); - properties.add(INVALID_RECORD_WRITER); - properties.add(SCHEMA_ACCESS_STRATEGY); - properties.add(SCHEMA_REGISTRY); - properties.add(SCHEMA_NAME); - properties.add(SCHEMA_TEXT); - properties.add(ALLOW_EXTRA_FIELDS); - properties.add(STRICT_TYPE_CHECKING); - properties.add(COERCE_TYPES); - properties.add(VALIDATION_DETAILS_ATTRIBUTE_NAME); - properties.add(MAX_VALIDATION_DETAILS_LENGTH); - return properties; + return PROPERTIES; } @Override public Set getRelationships() { - final Set relationships = new HashSet<>(); - relationships.add(REL_VALID); - relationships.add(REL_INVALID); - relationships.add(REL_FAILURE); - return relationships; + return RELATIONSHIPS; } @Override @@ -259,7 +261,7 @@ public class ValidateRecord extends AbstractProcessor { final String schemaAccessStrategy = validationContext.getProperty(SCHEMA_ACCESS_STRATEGY).getValue(); if (schemaAccessStrategy.equals(SCHEMA_NAME_PROPERTY.getValue())) { if (!validationContext.getProperty(SCHEMA_REGISTRY).isSet()) { - return Collections.singleton(new ValidationResult.Builder() + return Set.of(new ValidationResult.Builder() .subject("Schema Registry") .valid(false) .explanation("If the Schema Access Strategy is set to \"Use 'Schema Name' Property\", the Schema Registry property must also be set") @@ -268,7 +270,7 @@ public class ValidateRecord extends AbstractProcessor { final SchemaRegistry registry = 
validationContext.getProperty(SCHEMA_REGISTRY).asControllerService(SchemaRegistry.class); if (!registry.getSuppliedSchemaFields().contains(SchemaField.SCHEMA_NAME)) { - return Collections.singleton(new ValidationResult.Builder() + return Set.of(new ValidationResult.Builder() .subject("Schema Registry") .valid(false) .explanation("The configured Schema Registry does not support accessing schemas by name") @@ -398,7 +400,7 @@ public class ValidateRecord extends AbstractProcessor { } if (!extraFields.isEmpty()) { - if (errorBuilder.length() > 0) { + if (!errorBuilder.isEmpty()) { errorBuilder.append("; "); } @@ -407,7 +409,7 @@ public class ValidateRecord extends AbstractProcessor { } if (!invalidFields.isEmpty()) { - if (errorBuilder.length() > 0) { + if (!errorBuilder.isEmpty()) { errorBuilder.append("; "); } @@ -416,7 +418,7 @@ public class ValidateRecord extends AbstractProcessor { } if (!otherProblems.isEmpty()) { - if (errorBuilder.length() > 0) { + if (!errorBuilder.isEmpty()) { errorBuilder.append("; "); } diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateXml.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateXml.java index 6475e02f87..d46613c234 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateXml.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ValidateXml.java @@ -39,14 +39,13 @@ import org.apache.nifi.logging.ComponentLog; import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; -import org.apache.nifi.processor.ProcessorInitializationContext; import org.apache.nifi.processor.Relationship; import 
org.apache.nifi.processor.util.StandardValidators; import org.apache.nifi.xml.processing.ProcessingException; import org.apache.nifi.xml.processing.stream.StandardXMLStreamReaderProvider; import org.apache.nifi.xml.processing.stream.XMLStreamReaderProvider; -import org.apache.nifi.xml.processing.validation.StandardSchemaValidator; import org.apache.nifi.xml.processing.validation.SchemaValidator; +import org.apache.nifi.xml.processing.validation.StandardSchemaValidator; import org.xml.sax.SAXException; import javax.xml.stream.XMLStreamException; @@ -60,9 +59,6 @@ import java.io.ByteArrayInputStream; import java.io.InputStream; import java.net.URL; import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; @@ -111,6 +107,11 @@ public class ValidateXml extends AbstractProcessor { .addValidator(StandardValidators.ATTRIBUTE_KEY_VALIDATOR) .build(); + private static final List PROPERTIES = List.of( + SCHEMA_FILE, + XML_SOURCE_ATTRIBUTE + ); + public static final Relationship REL_VALID = new Relationship.Builder() .name("valid") .description("FlowFiles that are successfully validated against the schema, if provided, or verified to be well-formed XML are routed to this relationship") @@ -120,37 +121,25 @@ public class ValidateXml extends AbstractProcessor { .description("FlowFiles that are not valid according to the specified schema or contain invalid XML are routed to this relationship") .build(); + private static final Set RELATIONSHIPS = Set.of( + REL_VALID, + REL_INVALID + ); + private static final String SCHEMA_LANGUAGE = "http://www.w3.org/2001/XMLSchema"; - private static final SchemaValidator SCHEMA_VALIDATOR = new StandardSchemaValidator(); - private static final XMLStreamReaderProvider READER_PROVIDER = new StandardXMLStreamReaderProvider(); - private List properties; - private Set relationships; 
private final AtomicReference schemaRef = new AtomicReference<>(); - @Override - protected void init(final ProcessorInitializationContext context) { - final List properties = new ArrayList<>(); - properties.add(SCHEMA_FILE); - properties.add(XML_SOURCE_ATTRIBUTE); - this.properties = Collections.unmodifiableList(properties); - - final Set relationships = new HashSet<>(); - relationships.add(REL_VALID); - relationships.add(REL_INVALID); - this.relationships = Collections.unmodifiableSet(relationships); - } - @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override protected List getSupportedPropertyDescriptors() { - return properties; + return PROPERTIES; } @OnScheduled @@ -188,7 +177,7 @@ public class ValidateXml extends AbstractProcessor { validate(inputStream); } else { // If XML source attribute is not set, validate flowfile content - session.read(flowFile, inputStream -> validate(inputStream)); + session.read(flowFile, this::validate); } } catch (final RuntimeException e) { valid.set(false); diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Wait.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Wait.java index b321d93c55..b16b2ba3fc 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Wait.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/Wait.java @@ -16,24 +16,6 @@ */ package org.apache.nifi.processors.standard; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; -import 
java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicReference; -import java.util.function.Consumer; -import java.util.function.Function; -import java.util.function.Supplier; -import java.util.stream.Collectors; - import org.apache.commons.lang3.StringUtils; import org.apache.nifi.annotation.behavior.InputRequirement; import org.apache.nifi.annotation.behavior.InputRequirement.Requirement; @@ -61,6 +43,22 @@ import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.StandardValidators; import org.apache.nifi.processors.standard.WaitNotifyProtocol.Signal; +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.function.Supplier; +import java.util.stream.Collectors; + import static org.apache.nifi.processor.FlowFileFilter.FlowFileFilterResult.ACCEPT_AND_CONTINUE; import static org.apache.nifi.processor.FlowFileFilter.FlowFileFilterResult.ACCEPT_AND_TERMINATE; import static org.apache.nifi.processor.FlowFileFilter.FlowFileFilterResult.REJECT_AND_CONTINUE; @@ -239,6 +237,19 @@ public class Wait extends AbstractProcessor { .expressionLanguageSupported(ExpressionLanguageScope.NONE) .build(); + private static final List PROPERTIES = List.of( + RELEASE_SIGNAL_IDENTIFIER, + TARGET_SIGNAL_COUNT, + SIGNAL_COUNTER_NAME, + WAIT_BUFFER_COUNT, + RELEASABLE_FLOWFILE_COUNT, + EXPIRATION_DURATION, + DISTRIBUTED_CACHE_SERVICE, + ATTRIBUTE_COPY_MODE, + WAIT_MODE, + WAIT_PENALTY_DURATION + ); + public static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") 
.description("A FlowFile with a matching release signal in the cache will be routed to this relationship") @@ -259,38 +270,23 @@ public class Wait extends AbstractProcessor { .description("A FlowFile that has exceeded the configured Expiration Duration will be routed to this relationship") .build(); - private final Set relationships; + private static final Set RELATIONSHIPS = Set.of( + REL_SUCCESS, + REL_WAIT, + REL_EXPIRED, + REL_FAILURE + ); private final Map signalIdPenalties = new HashMap<>(); - public Wait() { - final Set rels = new HashSet<>(); - rels.add(REL_SUCCESS); - rels.add(REL_WAIT); - rels.add(REL_EXPIRED); - rels.add(REL_FAILURE); - relationships = Collections.unmodifiableSet(rels); - } - @Override protected List getSupportedPropertyDescriptors() { - final List descriptors = new ArrayList<>(); - descriptors.add(RELEASE_SIGNAL_IDENTIFIER); - descriptors.add(TARGET_SIGNAL_COUNT); - descriptors.add(SIGNAL_COUNTER_NAME); - descriptors.add(WAIT_BUFFER_COUNT); - descriptors.add(RELEASABLE_FLOWFILE_COUNT); - descriptors.add(EXPIRATION_DURATION); - descriptors.add(DISTRIBUTED_CACHE_SERVICE); - descriptors.add(ATTRIBUTE_COPY_MODE); - descriptors.add(WAIT_MODE); - descriptors.add(WAIT_PENALTY_DURATION); - return descriptors; + return PROPERTIES; } @Override public Set getRelationships() { - return relationships; + return RELATIONSHIPS; } @Override @@ -470,14 +466,14 @@ public class Wait extends AbstractProcessor { if (candidates.isEmpty()) { targetCounterName = context.getProperty(SIGNAL_COUNTER_NAME).evaluateAttributeExpressions(flowFile).getValue(); try { - targetCount = Long.valueOf(context.getProperty(TARGET_SIGNAL_COUNT).evaluateAttributeExpressions(flowFile).getValue()); + targetCount = Long.parseLong(context.getProperty(TARGET_SIGNAL_COUNT).evaluateAttributeExpressions(flowFile).getValue()); } catch (final NumberFormatException e) { transferToFailure.accept(flowFile); logger.error("Failed to parse targetCount when processing {} due to {}", flowFile, e, 
e); continue; } try { - releasableFlowFileCount = Integer.valueOf(context.getProperty(RELEASABLE_FLOWFILE_COUNT).evaluateAttributeExpressions(flowFile).getValue()); + releasableFlowFileCount = Integer.parseInt(context.getProperty(RELEASABLE_FLOWFILE_COUNT).evaluateAttributeExpressions(flowFile).getValue()); } catch (final NumberFormatException e) { transferToFailure.accept(flowFile); logger.error("Failed to parse releasableFlowFileCount when processing {} due to {}", flowFile, e, e); diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/calcite/RecordPathFunction.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/calcite/RecordPathFunction.java index e0d84b3f94..5d96fee5af 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/calcite/RecordPathFunction.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/calcite/RecordPathFunction.java @@ -40,7 +40,7 @@ import java.util.stream.Stream; public class RecordPathFunction { private static final RecordField ROOT_RECORD_FIELD = new RecordField("root", RecordFieldType.MAP.getMapDataType(RecordFieldType.STRING.getDataType())); - private static final RecordSchema ROOT_RECORD_SCHEMA = new SimpleRecordSchema(Collections.singletonList(ROOT_RECORD_FIELD)); + private static final RecordSchema ROOT_RECORD_SCHEMA = new SimpleRecordSchema(List.of(ROOT_RECORD_FIELD)); private static final RecordField PARENT_RECORD_FIELD = new RecordField("root", RecordFieldType.RECORD.getRecordDataType(ROOT_RECORD_SCHEMA)); protected static final RecordPathCache RECORD_PATH_CACHE = new RecordPathCache(100); diff --git 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/calcite/RecordPathFunctions.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/calcite/RecordPathFunctions.java index b21c951329..e9089fe79f 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/calcite/RecordPathFunctions.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/calcite/RecordPathFunctions.java @@ -50,7 +50,7 @@ public class RecordPathFunctions { public static class ObjectRecordPath extends RecordPathFunction { private static final RecordField ROOT_RECORD_FIELD = new RecordField("root", RecordFieldType.MAP.getMapDataType(RecordFieldType.STRING.getDataType())); - private static final RecordSchema ROOT_RECORD_SCHEMA = new SimpleRecordSchema(Collections.singletonList(ROOT_RECORD_FIELD)); + private static final RecordSchema ROOT_RECORD_SCHEMA = new SimpleRecordSchema(List.of(ROOT_RECORD_FIELD)); private static final RecordField PARENT_RECORD_FIELD = new RecordField("root", RecordFieldType.RECORD.getRecordDataType(ROOT_RECORD_SCHEMA)); @@ -124,7 +124,7 @@ public class RecordPathFunctions { } if (selectedFields.size() == 1) { - return selectedFields.get(0).getValue(); + return selectedFields.getFirst().getValue(); } return selectedFields.stream() diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/DatabaseAdapter.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/DatabaseAdapter.java index 0192102896..156cd1285a 100644 --- 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/DatabaseAdapter.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/DatabaseAdapter.java @@ -19,7 +19,6 @@ package org.apache.nifi.processors.standard.db; import java.sql.JDBCType; import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.Set; @@ -209,7 +208,7 @@ public interface DatabaseAdapter { .append(String.join(", ", columnsAndDatatypes)) .append(") "); - return Collections.singletonList(createTableStatement.toString()); + return List.of(createTableStatement.toString()); } /** diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/MSSQLDatabaseAdapter.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/MSSQLDatabaseAdapter.java index 1e95fefa6a..851268eaec 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/MSSQLDatabaseAdapter.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/MSSQLDatabaseAdapter.java @@ -21,7 +21,6 @@ import org.apache.nifi.processors.standard.db.ColumnDescription; import org.apache.nifi.processors.standard.db.DatabaseAdapter; import java.util.ArrayList; -import java.util.Collections; import java.util.List; /** @@ -125,7 +124,7 @@ public class MSSQLDatabaseAdapter implements DatabaseAdapter { } StringBuilder alterTableStatement = new StringBuilder(); - return Collections.singletonList(alterTableStatement.append("ALTER TABLE ") + return List.of(alterTableStatement.append("ALTER TABLE ") .append(quoteTableName 
? getTableQuoteString() : "") .append(tableName) .append(quoteTableName ? getTableQuoteString() : "") diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/MySQLDatabaseAdapter.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/MySQLDatabaseAdapter.java index baec18c57a..6e9e317322 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/MySQLDatabaseAdapter.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/MySQLDatabaseAdapter.java @@ -23,7 +23,6 @@ import java.sql.JDBCType; import java.sql.Types; import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import java.util.List; import java.util.stream.Collectors; @@ -91,8 +90,7 @@ public class MySQLDatabaseAdapter extends GenericDatabaseAdapter { throw new IllegalArgumentException("Key column names cannot be null or empty"); } - String columns = columnNames.stream() - .collect(Collectors.joining(", ")); + String columns = String.join(", ", columnNames); String parameterizedInsertValues = columnNames.stream() .map(__ -> "?") @@ -126,8 +124,7 @@ public class MySQLDatabaseAdapter extends GenericDatabaseAdapter { throw new IllegalArgumentException("Key column names cannot be null or empty"); } - String columns = columnNames.stream() - .collect(Collectors.joining(", ")); + String columns = String.join(", ", columnNames); String parameterizedInsertValues = columnNames.stream() .map(__ -> "?") @@ -171,7 +168,7 @@ public class MySQLDatabaseAdapter extends GenericDatabaseAdapter { } StringBuilder alterTableStatement = new StringBuilder(); - return Collections.singletonList(alterTableStatement.append("ALTER TABLE ") + return 
List.of(alterTableStatement.append("ALTER TABLE ") .append(quoteTableName ? getTableQuoteString() : "") .append(tableName) .append(quoteTableName ? getTableQuoteString() : "") diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/Oracle12DatabaseAdapter.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/Oracle12DatabaseAdapter.java index 9a5a1943d0..83336faf63 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/Oracle12DatabaseAdapter.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/Oracle12DatabaseAdapter.java @@ -16,20 +16,19 @@ */ package org.apache.nifi.processors.standard.db.impl; -import java.sql.JDBCType; -import java.sql.Types; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.Set; -import java.util.stream.Collectors; - import org.apache.commons.lang3.StringUtils; import org.apache.nifi.processors.standard.db.ColumnDescription; import org.apache.nifi.processors.standard.db.DatabaseAdapter; import org.apache.nifi.processors.standard.db.TableSchema; +import java.sql.JDBCType; +import java.sql.Types; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; + import static java.sql.Types.CHAR; import static java.sql.Types.CLOB; import static java.sql.Types.LONGNVARCHAR; @@ -224,7 +223,7 @@ public class Oracle12DatabaseAdapter implements DatabaseAdapter { .append(String.join(", ", columnsAndDatatypes)) .append(") "); - return Collections.singletonList(createTableStatement.toString()); + return List.of(createTableStatement.toString()); } @Override diff --git 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/OracleDatabaseAdapter.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/OracleDatabaseAdapter.java index db418b781c..20f0def78d 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/OracleDatabaseAdapter.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/OracleDatabaseAdapter.java @@ -24,7 +24,6 @@ import org.apache.nifi.processors.standard.db.TableSchema; import java.sql.JDBCType; import java.sql.Types; import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.Set; @@ -152,7 +151,7 @@ public class OracleDatabaseAdapter implements DatabaseAdapter { .append(String.join(", ", columnsAndDatatypes)) .append(") "); - return Collections.singletonList(createTableStatement.toString()); + return List.of(createTableStatement.toString()); } @Override diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/PhoenixDatabaseAdapter.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/PhoenixDatabaseAdapter.java index 86d51e7f38..62b9150f15 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/PhoenixDatabaseAdapter.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/PhoenixDatabaseAdapter.java @@ -24,7 +24,6 @@ import java.sql.JDBCType; import java.sql.Types; import java.util.ArrayList; import java.util.Collection; -import 
java.util.Collections; import java.util.List; import java.util.stream.Collectors; @@ -159,7 +158,7 @@ public final class PhoenixDatabaseAdapter implements DatabaseAdapter { } StringBuilder alterTableStatement = new StringBuilder(); - return Collections.singletonList(alterTableStatement.append("ALTER TABLE ") + return List.of(alterTableStatement.append("ALTER TABLE ") .append(quoteTableName ? getTableQuoteString() : "") .append(tableName) .append(quoteTableName ? getTableQuoteString() : "") diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/PostgreSQLDatabaseAdapter.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/PostgreSQLDatabaseAdapter.java index 8ba7b64b22..89f1834299 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/PostgreSQLDatabaseAdapter.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/db/impl/PostgreSQLDatabaseAdapter.java @@ -23,7 +23,6 @@ import java.sql.JDBCType; import java.sql.Types; import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; @@ -72,8 +71,7 @@ public class PostgreSQLDatabaseAdapter extends GenericDatabaseAdapter { throw new IllegalArgumentException("Key column names cannot be null or empty"); } - String columns = columnNames.stream() - .collect(Collectors.joining(", ")); + String columns = String.join(", ", columnNames); String parameterizedInsertValues = columnNames.stream() .map(__ -> "?") @@ -83,7 +81,7 @@ public class PostgreSQLDatabaseAdapter extends GenericDatabaseAdapter { .map(columnName -> "EXCLUDED." 
+ columnName) .collect(Collectors.joining(", ")); - String conflictClause = "(" + uniqueKeyColumnNames.stream().collect(Collectors.joining(", ")) + ")"; + String conflictClause = "(" + String.join(", ", uniqueKeyColumnNames) + ")"; StringBuilder statementStringBuilder = new StringBuilder("INSERT INTO ") .append(table) @@ -112,14 +110,13 @@ public class PostgreSQLDatabaseAdapter extends GenericDatabaseAdapter { throw new IllegalArgumentException("Key column names cannot be null or empty"); } - String columns = columnNames.stream() - .collect(Collectors.joining(", ")); + String columns = String.join(", ", columnNames); String parameterizedInsertValues = columnNames.stream() .map(__ -> "?") .collect(Collectors.joining(", ")); - String conflictClause = "(" + uniqueKeyColumnNames.stream().collect(Collectors.joining(", ")) + ")"; + String conflictClause = "(" + String.join(", ", uniqueKeyColumnNames) + ")"; StringBuilder statementStringBuilder = new StringBuilder("INSERT INTO ") .append(table) @@ -152,7 +149,7 @@ public class PostgreSQLDatabaseAdapter extends GenericDatabaseAdapter { } StringBuilder alterTableStatement = new StringBuilder(); - return Collections.singletonList(alterTableStatement.append("ALTER TABLE ") + return List.of(alterTableStatement.append("ALTER TABLE ") .append(quoteTableName ? getTableQuoteString() : "") .append(tableName) .append(quoteTableName ? 
getTableQuoteString() : "") diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/faker/FakerUtils.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/faker/FakerUtils.java index 15fc488994..0f9e308826 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/faker/FakerUtils.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/faker/FakerUtils.java @@ -42,8 +42,7 @@ public class FakerUtils { private static final int RANDOM_DATE_DAYS = 365; private static final Map datatypeFunctionMap = new LinkedHashMap<>(); - private static final List providerPackages = Arrays.asList("base", "entertainment", "food", "sport", "videogame"); - + private static final List PROVIDER_PACKAGES = List.of("base", "entertainment", "food", "sport", "videogame"); // Additional Faker datatypes that don't use predetermined data files (i.e. they generate data or have non-String types) static final AllowableValue FT_BOOL = new AllowableValue("Boolean.bool", "Boolean - bool (true/false)", "A value of 'true' or 'false'"); @@ -66,7 +65,7 @@ public class FakerUtils { try { // The providers are in different sub-packages, try them all until one succeeds Class fakerTypeClass = null; - for (String subPackage : providerPackages) { + for (String subPackage : PROVIDER_PACKAGES) { try { fakerTypeClass = Class.forName(PACKAGE_PREFIX + '.' + subPackage + "." 
+ className); break; @@ -91,7 +90,7 @@ public class FakerUtils { Modifier.isPublic(method.getModifiers()) && method.getParameterCount() == 0 && method.getReturnType() == String.class) - .collect(Collectors.toList()); + .toList(); try { final Object methodObject = faker.getClass().getMethod(normalizeMethodName(entry.getKey())).invoke(faker); for (Method method : fakerMethods) { @@ -146,8 +145,7 @@ public class FakerUtils { // If not a special circumstance, use the map to call the associated Faker method and return the value try { final FakerMethodHolder fakerMethodHolder = datatypeFunctionMap.get(type); - Object returnObject = fakerMethodHolder.getMethod().invoke(fakerMethodHolder.getMethodObject()); - return returnObject; + return fakerMethodHolder.getMethod().invoke(fakerMethodHolder.getMethodObject()); } catch (InvocationTargetException | IllegalAccessException e) { throw new ProcessException(type + " is not a valid value", e); } @@ -178,17 +176,15 @@ public class FakerUtils { // This method identifies "segments" by splitting the given name on underscores, then capitalizes each segment and removes the underscores. Ex: 'game_of_thrones' = 'GameOfThrones' private static String normalizeClassName(String name) { String[] segments = name.split("_"); - String newName = Arrays.stream(segments) + + return Arrays.stream(segments) .map(s -> s.substring(0, 1).toUpperCase() + s.substring(1)) .collect(Collectors.joining()); - return newName; } // This method lowercases the first letter of the given name in order to match the name to a Faker method private static String normalizeMethodName(String name) { - - String newName = name.substring(0, 1).toLowerCase() + name.substring(1); - return newName; + return name.substring(0, 1).toLowerCase() + name.substring(1); } // This method splits the given name on uppercase letters, ensures the first letter is capitalized, then joins the segments using a space. Ex. 
'gameOfThrones' = 'Game Of Thrones' diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ftp/NifiFtpServer.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ftp/NifiFtpServer.java index b2bc134788..281163af4b 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ftp/NifiFtpServer.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ftp/NifiFtpServer.java @@ -43,7 +43,6 @@ import org.apache.nifi.processors.standard.ftp.filesystem.VirtualFileSystemFacto import org.apache.nifi.ssl.SSLContextService; import java.io.File; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; @@ -213,9 +212,9 @@ public class NifiFtpServer implements org.apache.nifi.processors.standard.ftp.Ft private User createUser(String username, String password, String homeDirectory) { boolean anonymousLoginEnabled = (username == null); if (anonymousLoginEnabled) { - return createAnonymousUser(homeDirectory, Collections.singletonList(new WritePermission())); + return createAnonymousUser(homeDirectory, List.of(new WritePermission())); } else { - return createNamedUser(username, password, homeDirectory, Collections.singletonList(new WritePermission())); + return createNamedUser(username, password, homeDirectory, List.of(new WritePermission())); } } diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ftp/commands/FtpCommandHELP.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ftp/commands/FtpCommandHELP.java index 98c4c0469b..4a3f921657 100644 --- 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ftp/commands/FtpCommandHELP.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ftp/commands/FtpCommandHELP.java @@ -23,8 +23,6 @@ import org.apache.ftpserver.ftplet.FtpRequest; import org.apache.ftpserver.impl.FtpIoSession; import org.apache.ftpserver.impl.FtpServerContext; -import java.util.Collections; -import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Set; @@ -32,54 +30,51 @@ import java.util.TreeSet; public class FtpCommandHELP extends AbstractCommand { - private static Map COMMAND_SPECIFIC_HELP; - private static int MAX_NUMBER_OF_COMMANDS_IN_A_ROW = 5; - private Set availableCommands = new TreeSet<>(); + private static final Map COMMAND_SPECIFIC_HELP = Map.ofEntries( + Map.entry("ABOR", "Syntax: ABOR"), + Map.entry("APPE", "Syntax: APPE "), + Map.entry("AUTH", "Syntax: AUTH "), + Map.entry("CDUP", "Syntax: CDUP"), + Map.entry("CWD", "Syntax: CWD "), + Map.entry("DELE", "Syntax: DELE "), + Map.entry("EPRT", "Syntax: EPRT"), + Map.entry("EPSV", "Syntax: EPSV"), + Map.entry("FEAT", "Syntax: FEAT"), + Map.entry("HELP", "Syntax: HELP [ ]"), + Map.entry("LIST", "Syntax: LIST [ ]"), + Map.entry("MDTM", "Syntax: MDTM "), + Map.entry("MKD", "Syntax: MKD "), + Map.entry("MLSD", "Syntax: MLSD [ ]"), + Map.entry("MLST", "Syntax: MLST [ ]"), + Map.entry("MODE", "Syntax: MODE "), + Map.entry("NLST", "Syntax: NLST [ ]"), + Map.entry("NOOP", "Syntax: NOOP"), + Map.entry("OPTS", "Syntax: OPTS "), + Map.entry("PASS", "Syntax: PASS "), + Map.entry("PASV", "Syntax: PASV"), + Map.entry("PBSZ", "Syntax: PBSZ "), + Map.entry("PORT", "Syntax: PORT "), + Map.entry("PROT", "Syntax: PROT "), + Map.entry("PWD", "Syntax: PWD"), + Map.entry("QUIT", "Syntax: QUIT"), + Map.entry("REIN", "Syntax: REIN"), + Map.entry("REST", "Syntax: REST "), + 
Map.entry("RETR", "Syntax: RETR "), + Map.entry("RMD", "Syntax: RMD "), + Map.entry("RNFR", "Syntax: RNFR "), + Map.entry("RNTO", "Syntax: RNTO "), + Map.entry("SITE", "Syntax: SITE "), + Map.entry("SIZE", "Syntax: SIZE "), + Map.entry("STAT", "Syntax: STAT [ ]"), + Map.entry("STOR", "Syntax: STOR "), + Map.entry("STOU", "Syntax: STOU"), + Map.entry("SYST", "Syntax: SYST"), + Map.entry("TYPE", "Syntax: TYPE "), + Map.entry("USER", "Syntax: USER ") + ); - static { - Map commands = new HashMap<>(); - commands.put("ABOR", "Syntax: ABOR"); - commands.put("APPE", "Syntax: APPE "); - commands.put("AUTH", "Syntax: AUTH "); - commands.put("CDUP", "Syntax: CDUP"); - commands.put("CWD", "Syntax: CWD "); - commands.put("DELE", "Syntax: DELE "); - commands.put("EPRT", "Syntax: EPRT"); - commands.put("EPSV", "Syntax: EPSV"); - commands.put("FEAT", "Syntax: FEAT"); - commands.put("HELP", "Syntax: HELP [ ]"); - commands.put("LIST", "Syntax: LIST [ ]"); - commands.put("MDTM", "Syntax: MDTM "); - commands.put("MKD", "Syntax: MKD "); - commands.put("MLSD", "Syntax: MLSD [ ]"); - commands.put("MLST", "Syntax: MLST [ ]"); - commands.put("MODE", "Syntax: MODE "); - commands.put("NLST", "Syntax: NLST [ ]"); - commands.put("NOOP", "Syntax: NOOP"); - commands.put("OPTS", "Syntax: OPTS "); - commands.put("PASS", "Syntax: PASS "); - commands.put("PASV", "Syntax: PASV"); - commands.put("PBSZ", "Syntax: PBSZ "); - commands.put("PORT", "Syntax: PORT "); - commands.put("PROT", "Syntax: PROT "); - commands.put("PWD", "Syntax: PWD"); - commands.put("QUIT", "Syntax: QUIT"); - commands.put("REIN", "Syntax: REIN"); - commands.put("REST", "Syntax: REST "); - commands.put("RETR", "Syntax: RETR "); - commands.put("RMD", "Syntax: RMD "); - commands.put("RNFR", "Syntax: RNFR "); - commands.put("RNTO", "Syntax: RNTO "); - commands.put("SITE", "Syntax: SITE "); - commands.put("SIZE", "Syntax: SIZE "); - commands.put("STAT", "Syntax: STAT [ ]"); - commands.put("STOR", "Syntax: STOR "); - 
commands.put("STOU", "Syntax: STOU"); - commands.put("SYST", "Syntax: SYST"); - commands.put("TYPE", "Syntax: TYPE "); - commands.put("USER", "Syntax: USER "); - COMMAND_SPECIFIC_HELP = Collections.unmodifiableMap(commands); - } + private static final int MAX_NUMBER_OF_COMMANDS_IN_A_ROW = 5; + private final Set availableCommands = new TreeSet<>(); public void addCommand(String command) { if (!command.startsWith("SITE_")) { // Parameterized commands of SITE will not appear in the general help. @@ -89,7 +84,6 @@ public class FtpCommandHELP extends AbstractCommand { public void execute(final FtpIoSession session, final FtpServerContext context, final FtpRequest request) { - // reset state variables session.resetState(); @@ -105,7 +99,7 @@ public class FtpCommandHELP extends AbstractCommand { } private String getDefaultHelpMessage() { - StringBuffer helpMessage = new StringBuffer("The following commands are supported.\n"); + StringBuilder helpMessage = new StringBuilder("The following commands are supported.\n"); int currentNumberOfCommandsInARow = 0; Iterator iterator = availableCommands.iterator(); while (iterator.hasNext()) { @@ -115,7 +109,7 @@ public class FtpCommandHELP extends AbstractCommand { currentNumberOfCommandsInARow = 0; } if (iterator.hasNext()) { - helpMessage.append(command + ", "); + helpMessage.append(command).append(", "); } else { helpMessage.append(command); } diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/hash/HashAlgorithm.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/hash/HashAlgorithm.java index 46bc7d2cd1..266eebb981 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/hash/HashAlgorithm.java +++ 
b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/hash/HashAlgorithm.java @@ -46,7 +46,7 @@ public enum HashAlgorithm { private final int digestBytesLength; private final String description; - private static final List BROKEN_ALGORITHMS = Arrays.asList(MD2.name, MD5.name, SHA1.name); + private static final List BROKEN_ALGORITHMS = List.of(MD2.name, MD5.name, SHA1.name); HashAlgorithm(String name, int digestBytesLength, String description) { this.name = name; @@ -68,9 +68,9 @@ public enum HashAlgorithm { /** * Returns {@code true} if this algorithm is considered cryptographically secure. These determinations were made as of 2018-08-30. - * + *

* Current strong algorithms: - * + *

* * SHA-224 (SHA2) * * SHA-256 (SHA2) * * SHA-384 (SHA2) @@ -85,9 +85,9 @@ public enum HashAlgorithm { * * Blake2b-256 * * Blake2b-384 * * Blake2b-512 - * + *

* Current broken algorithms: - * + *

* * MD2 * * MD5 * * SHA-1 @@ -114,9 +114,9 @@ public enum HashAlgorithm { /** * Returns a more complete description of the algorithm for {@link org.apache.nifi.components.AllowableValue} construction. - * + *

* Ex: - * + *

* {@code description} -- Cryptographically broken due to collisions * {@code buildAllowableValueDescription} -- SHA-1 (20 byte output) [WARNING -- Cryptographically broken] Cryptographically broken due to collisions * diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/hash/HashService.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/hash/HashService.java index 5e7c282594..a426252b4e 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/hash/HashService.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/hash/HashService.java @@ -16,6 +16,11 @@ */ package org.apache.nifi.processors.standard.hash; +import org.apache.nifi.components.AllowableValue; +import org.bouncycastle.crypto.digests.Blake2bDigest; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.io.IOException; import java.io.InputStream; import java.nio.charset.Charset; @@ -23,13 +28,8 @@ import java.nio.charset.StandardCharsets; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.ArrayList; -import java.util.Arrays; import java.util.HexFormat; import java.util.List; -import org.apache.nifi.components.AllowableValue; -import org.bouncycastle.crypto.digests.Blake2bDigest; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; /** * This class provides a generic service for cryptographic hashing. 
@@ -82,12 +82,14 @@ public class HashService { * @return the list of charsets */ public static List getSupportedCharsets() { - return Arrays.asList(StandardCharsets.US_ASCII, + return List.of( + StandardCharsets.US_ASCII, StandardCharsets.ISO_8859_1, StandardCharsets.UTF_8, StandardCharsets.UTF_16BE, StandardCharsets.UTF_16LE, - StandardCharsets.UTF_16); + StandardCharsets.UTF_16 + ); } /** diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/http/HttpProtocolStrategy.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/http/HttpProtocolStrategy.java index d99d4182ef..d580774028 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/http/HttpProtocolStrategy.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/http/HttpProtocolStrategy.java @@ -19,21 +19,15 @@ package org.apache.nifi.processors.standard.http; import org.apache.nifi.components.DescribedValue; import org.apache.nifi.jetty.configuration.connector.ApplicationLayerProtocol; -import java.util.LinkedHashSet; import java.util.Set; -import static java.util.Arrays.asList; -import static java.util.Collections.singleton; - /** * HTTP protocol configuration strategy */ public enum HttpProtocolStrategy implements DescribedValue { - HTTP_1_1("http/1.1", "HTTP/1.1", singleton(ApplicationLayerProtocol.HTTP_1_1)), - - H2_HTTP_1_1("h2 http/1.1", "HTTP/2 and HTTP/1.1 negotiated based on requested protocols", new LinkedHashSet<>(asList(ApplicationLayerProtocol.HTTP_1_1, ApplicationLayerProtocol.H2))), - - H2("h2", "HTTP/2", singleton(ApplicationLayerProtocol.H2)); + HTTP_1_1("http/1.1", "HTTP/1.1", Set.of(ApplicationLayerProtocol.HTTP_1_1)), + H2_HTTP_1_1("h2 http/1.1", "HTTP/2 and HTTP/1.1 negotiated based on 
requested protocols", Set.of(ApplicationLayerProtocol.HTTP_1_1, ApplicationLayerProtocol.H2)), + H2("h2", "HTTP/2", Set.of(ApplicationLayerProtocol.H2)); private final String displayName; diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/merge/RecordBin.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/merge/RecordBin.java index c15f4968d3..ebb6bdca7f 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/merge/RecordBin.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/merge/RecordBin.java @@ -34,11 +34,11 @@ import org.apache.nifi.stream.io.ByteCountingOutputStream; import java.io.IOException; import java.io.OutputStream; import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.locks.Lock; @@ -146,7 +146,7 @@ public class RecordBin { // here because it needs to be closed before we are able to migrate the FlowFile // to a new Session. 
recordReader.close(); - flowFileSession.migrate(this.session, Collections.singleton(flowFile)); + flowFileSession.migrate(this.session, Set.of(flowFile)); flowFileMigrated = true; this.flowFiles.add(flowFile); @@ -184,7 +184,7 @@ public class RecordBin { } if (!flowFileMigrated) { - flowFileSession.migrate(this.session, Collections.singleton(flowFile)); + flowFileSession.migrate(this.session, Set.of(flowFile)); this.flowFiles.add(flowFile); } } finally { diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/merge/RecordBinManager.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/merge/RecordBinManager.java index 08007e37f7..db36e8927c 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/merge/RecordBinManager.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/merge/RecordBinManager.java @@ -30,7 +30,6 @@ import org.apache.nifi.serialization.RecordReader; import java.io.IOException; import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -139,7 +138,7 @@ public class RecordBinManager { // We have to do this outside of our for-loop above in order to avoid a concurrent modification Exception. 
if (acceptedBin != null) { if (acceptedBin.isComplete()) { - removeBins(groupIdentifier, Collections.singletonList(acceptedBin)); + removeBins(groupIdentifier, List.of(acceptedBin)); } return; @@ -226,7 +225,7 @@ public class RecordBinManager { return; } - removeBins(oldestBinGroup, Collections.singletonList(oldestBin)); + removeBins(oldestBinGroup, List.of(oldestBin)); } finally { lock.unlock(); } diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/servlets/ListenHTTPServlet.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/servlets/ListenHTTPServlet.java index 138645b060..429b3f174e 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/servlets/ListenHTTPServlet.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/servlets/ListenHTTPServlet.java @@ -16,27 +16,6 @@ */ package org.apache.nifi.processors.standard.servlets; -import java.io.BufferedInputStream; -import java.io.BufferedOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.nio.charset.StandardCharsets; -import java.security.cert.X509Certificate; -import java.util.Collection; -import java.util.Enumeration; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; -import java.util.UUID; -import java.util.concurrent.ConcurrentMap; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicLong; -import java.util.concurrent.atomic.AtomicReference; -import java.util.regex.Pattern; -import java.util.zip.GZIPInputStream; import jakarta.servlet.AsyncContext; import jakarta.servlet.MultipartConfigElement; import 
jakarta.servlet.ServletConfig; @@ -77,6 +56,28 @@ import org.apache.nifi.util.FlowFileUnpackagerV2; import org.apache.nifi.util.FlowFileUnpackagerV3; import org.eclipse.jetty.ee10.servlet.ServletContextRequest; +import java.io.BufferedInputStream; +import java.io.BufferedOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.nio.charset.StandardCharsets; +import java.security.cert.X509Certificate; +import java.util.Collection; +import java.util.Enumeration; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.UUID; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicReference; +import java.util.regex.Pattern; +import java.util.zip.GZIPInputStream; + @Path("") public class ListenHTTPServlet extends HttpServlet { diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ssh/StandardSSHClientProvider.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ssh/StandardSSHClientProvider.java index 1a612c9292..d4fac98180 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ssh/StandardSSHClientProvider.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/ssh/StandardSSHClientProvider.java @@ -33,7 +33,6 @@ import net.schmizz.sshj.userauth.method.AuthPublickey; import net.schmizz.sshj.userauth.method.PasswordResponseProvider; import net.schmizz.sshj.userauth.password.PasswordFinder; import net.schmizz.sshj.userauth.password.PasswordUtils; - import 
org.apache.nifi.context.PropertyContext; import org.apache.nifi.processors.standard.socket.ClientAuthenticationException; import org.apache.nifi.processors.standard.socket.ClientConfigurationException; @@ -49,34 +48,33 @@ import java.io.IOException; import java.io.UncheckedIOException; import java.net.Proxy; import java.util.ArrayList; -import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.concurrent.TimeUnit; import static org.apache.nifi.processors.standard.util.FTPTransfer.createComponentProxyConfigSupplier; -import static org.apache.nifi.processors.standard.util.SFTPTransfer.DATA_TIMEOUT; import static org.apache.nifi.processors.standard.util.SFTPTransfer.CONNECTION_TIMEOUT; -import static org.apache.nifi.processors.standard.util.SFTPTransfer.PORT; -import static org.apache.nifi.processors.standard.util.SFTPTransfer.USERNAME; -import static org.apache.nifi.processors.standard.util.SFTPTransfer.PASSWORD; +import static org.apache.nifi.processors.standard.util.SFTPTransfer.DATA_TIMEOUT; import static org.apache.nifi.processors.standard.util.SFTPTransfer.HOSTNAME; import static org.apache.nifi.processors.standard.util.SFTPTransfer.HOST_KEY_FILE; +import static org.apache.nifi.processors.standard.util.SFTPTransfer.PASSWORD; +import static org.apache.nifi.processors.standard.util.SFTPTransfer.PORT; import static org.apache.nifi.processors.standard.util.SFTPTransfer.PRIVATE_KEY_PASSPHRASE; import static org.apache.nifi.processors.standard.util.SFTPTransfer.PRIVATE_KEY_PATH; import static org.apache.nifi.processors.standard.util.SFTPTransfer.STRICT_HOST_KEY_CHECKING; +import static org.apache.nifi.processors.standard.util.SFTPTransfer.USERNAME; import static org.apache.nifi.processors.standard.util.SFTPTransfer.USE_COMPRESSION; /** * Standard implementation of SSH Client Provider */ public class StandardSSHClientProvider implements SSHClientProvider { - private static final SSHConfigProvider 
SSH_CONFIG_PROVIDER = new StandardSSHConfigProvider(); + private static final SSHConfigProvider SSH_CONFIG_PROVIDER = new StandardSSHConfigProvider(); private static final SocketFactoryProvider SOCKET_FACTORY_PROVIDER = new StandardSocketFactoryProvider(); - private static final List SUPPORTED_PROXY_TYPES = Arrays.asList(Proxy.Type.HTTP, Proxy.Type.SOCKS); + private static final List SUPPORTED_PROXY_TYPES = List.of(Proxy.Type.HTTP, Proxy.Type.SOCKS); private static final String ADDRESS_FORMAT = "%s:%d"; diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/SFTPTransfer.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/SFTPTransfer.java index 1bd0ec7cee..23d8eddded 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/SFTPTransfer.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/util/SFTPTransfer.java @@ -87,14 +87,14 @@ public class SFTPTransfer implements FileTransfer { static { DefaultConfig defaultConfig = new DefaultConfig(); - DEFAULT_KEY_ALGORITHM_NAMES = Collections.unmodifiableSet(defaultConfig.getKeyAlgorithms().stream() - .map(Factory.Named::getName).collect(Collectors.toSet())); - DEFAULT_CIPHER_NAMES = Collections.unmodifiableSet(defaultConfig.getCipherFactories().stream() - .map(Factory.Named::getName).collect(Collectors.toSet())); - DEFAULT_MESSAGE_AUTHENTICATION_CODE_NAMES = Collections.unmodifiableSet(defaultConfig.getMACFactories().stream() - .map(Factory.Named::getName).collect(Collectors.toSet())); - DEFAULT_KEY_EXCHANGE_ALGORITHM_NAMES = Collections.unmodifiableSet(defaultConfig.getKeyExchangeFactories().stream() - .map(Factory.Named::getName).collect(Collectors.toSet())); + DEFAULT_KEY_ALGORITHM_NAMES = 
defaultConfig.getKeyAlgorithms().stream() + .map(Factory.Named::getName).collect(Collectors.toUnmodifiableSet()); + DEFAULT_CIPHER_NAMES = defaultConfig.getCipherFactories().stream() + .map(Factory.Named::getName).collect(Collectors.toUnmodifiableSet()); + DEFAULT_MESSAGE_AUTHENTICATION_CODE_NAMES = defaultConfig.getMACFactories().stream() + .map(Factory.Named::getName).collect(Collectors.toUnmodifiableSet()); + DEFAULT_KEY_EXCHANGE_ALGORITHM_NAMES = defaultConfig.getKeyExchangeFactories().stream() + .map(Factory.Named::getName).collect(Collectors.toUnmodifiableSet()); } /** @@ -478,15 +478,10 @@ public class SFTPTransfer implements FileTransfer { @Override public FlowFile getRemoteFile(final String remoteFileName, final FlowFile origFlowFile, final ProcessSession session) throws ProcessException, IOException { final SFTPClient sftpClient = getSFTPClient(origFlowFile); - RemoteFile rf = null; - RemoteFile.ReadAheadRemoteFileInputStream rfis = null; - FlowFile resultFlowFile; - try { - rf = sftpClient.open(remoteFileName); - rfis = rf.new ReadAheadRemoteFileInputStream(16); - final InputStream in = rfis; - resultFlowFile = session.write(origFlowFile, out -> StreamUtils.copy(in, out)); - return resultFlowFile; + + try (RemoteFile rf = sftpClient.open(remoteFileName); + RemoteFile.ReadAheadRemoteFileInputStream rfis = rf.new ReadAheadRemoteFileInputStream(16)) { + return session.write(origFlowFile, out -> StreamUtils.copy(rfis, out)); } catch (final SFTPException e) { switch (e.getStatusCode()) { case NO_SUCH_FILE: @@ -496,21 +491,6 @@ public class SFTPTransfer implements FileTransfer { default: throw new IOException("Failed to obtain file content for " + remoteFileName, e); } - } finally { - if (rf != null) { - try { - rf.close(); - } catch (final IOException ioe) { - //do nothing - } - } - if (rfis != null) { - try { - rfis.close(); - } catch (final IOException ioe) { - //do nothing - } - } } } diff --git 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/InvokeHTTPTest.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/InvokeHTTPTest.java index 6f69ce257f..3699c842df 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/InvokeHTTPTest.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/InvokeHTTPTest.java @@ -27,8 +27,8 @@ import org.apache.nifi.oauth2.OAuth2AccessTokenProvider; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.util.URLValidator; import org.apache.nifi.processors.standard.http.ContentEncodingStrategy; -import org.apache.nifi.processors.standard.http.FlowFileNamingStrategy; import org.apache.nifi.processors.standard.http.CookieStrategy; +import org.apache.nifi.processors.standard.http.FlowFileNamingStrategy; import org.apache.nifi.processors.standard.http.HttpHeader; import org.apache.nifi.processors.standard.http.HttpMethod; import org.apache.nifi.provenance.ProvenanceEventRecord; @@ -46,6 +46,13 @@ import org.apache.nifi.util.MockFlowFile; import org.apache.nifi.util.TestRunner; import org.apache.nifi.util.TestRunners; import org.apache.nifi.web.util.ssl.SslContextUtils; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; import org.mockito.Answers; import javax.net.ssl.SSLContext; @@ -67,19 +74,11 @@ import java.util.regex.Pattern; import java.util.stream.Stream; import java.util.zip.GZIPInputStream; -import org.junit.jupiter.api.AfterEach; -import 
org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.Arguments; -import org.junit.jupiter.params.provider.MethodSource; - -import static java.net.HttpURLConnection.HTTP_OK; -import static java.net.HttpURLConnection.HTTP_MOVED_TEMP; import static java.net.HttpURLConnection.HTTP_BAD_REQUEST; -import static java.net.HttpURLConnection.HTTP_UNAUTHORIZED; import static java.net.HttpURLConnection.HTTP_INTERNAL_ERROR; +import static java.net.HttpURLConnection.HTTP_MOVED_TEMP; +import static java.net.HttpURLConnection.HTTP_OK; +import static java.net.HttpURLConnection.HTTP_UNAUTHORIZED; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNotEquals; @@ -315,7 +314,7 @@ public class InvokeHTTPTest { assertRelationshipStatusCodeEquals(InvokeHTTP.ORIGINAL, HTTP_OK); - final MockFlowFile flowFile = runner.getFlowFilesForRelationship(InvokeHTTP.ORIGINAL).iterator().next(); + final MockFlowFile flowFile = runner.getFlowFilesForRelationship(InvokeHTTP.ORIGINAL).getFirst(); flowFile.assertAttributeEquals(outputAttributeKey, body); } @@ -715,7 +714,7 @@ public class InvokeHTTPTest { @Test public void testRunPostHttp200SuccessContentEncodingGzip() throws InterruptedException, IOException { runner.setProperty(InvokeHTTP.HTTP_METHOD, HttpMethod.POST.name()); - runner.setProperty(InvokeHTTP.REQUEST_CONTENT_ENCODING, ContentEncodingStrategy.GZIP.getValue()); + runner.setProperty(InvokeHTTP.REQUEST_CONTENT_ENCODING, ContentEncodingStrategy.GZIP); runner.setProperty(InvokeHTTP.REQUEST_BODY_ENABLED, Boolean.TRUE.toString()); enqueueResponseCodeAndRun(HTTP_OK); @@ -809,7 +808,7 @@ public class InvokeHTTPTest { runner.run(); - final MockFlowFile flowFile = 
runner.getFlowFilesForRelationship(InvokeHTTP.RESPONSE).iterator().next(); + final MockFlowFile flowFile = runner.getFlowFilesForRelationship(InvokeHTTP.RESPONSE).getFirst(); flowFile.assertAttributeEquals(CoreAttributes.FILENAME.key(), expectedFileName); } @@ -940,15 +939,15 @@ public class InvokeHTTPTest { } private MockFlowFile getFailureFlowFile() { - return runner.getFlowFilesForRelationship(InvokeHTTP.FAILURE).iterator().next(); + return runner.getFlowFilesForRelationship(InvokeHTTP.FAILURE).getFirst(); } private MockFlowFile getRequestFlowFile() { - return runner.getFlowFilesForRelationship(InvokeHTTP.ORIGINAL).iterator().next(); + return runner.getFlowFilesForRelationship(InvokeHTTP.ORIGINAL).getFirst(); } private MockFlowFile getResponseFlowFile() { - return runner.getFlowFilesForRelationship(InvokeHTTP.RESPONSE).iterator().next(); + return runner.getFlowFilesForRelationship(InvokeHTTP.RESPONSE).getFirst(); } private void assertRequestMethodSuccess(final HttpMethod httpMethod) throws InterruptedException { @@ -965,7 +964,7 @@ public class InvokeHTTPTest { final List responseFlowFiles = runner.getFlowFilesForRelationship(relationship); final String message = String.format("FlowFiles not found for Relationship [%s]", relationship); assertFalse(responseFlowFiles.isEmpty(), message); - final MockFlowFile responseFlowFile = responseFlowFiles.iterator().next(); + final MockFlowFile responseFlowFile = responseFlowFiles.getFirst(); assertStatusCodeEquals(responseFlowFile, statusCode); } @@ -1000,7 +999,7 @@ public class InvokeHTTPTest { assertResponseSuccessRelationships(); assertRelationshipStatusCodeEquals(InvokeHTTP.RESPONSE, HTTP_OK); - final MockFlowFile flowFile = runner.getFlowFilesForRelationship(InvokeHTTP.RESPONSE).iterator().next(); + final MockFlowFile flowFile = runner.getFlowFilesForRelationship(InvokeHTTP.RESPONSE).getFirst(); flowFile.assertAttributeExists(InvokeHTTP.REMOTE_DN); } diff --git 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestAttributesToJSON.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestAttributesToJSON.java index 5e5f244d19..8e91931fbc 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestAttributesToJSON.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestAttributesToJSON.java @@ -45,6 +45,7 @@ import java.util.stream.Stream; import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertInstanceOf; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertThrows; @@ -68,9 +69,7 @@ public class TestAttributesToJSON { FlowFile ff = session.create(); testRunner.enqueue(ff); - assertThrows(AssertionError.class, () -> { - testRunner.run(); - }); + assertThrows(AssertionError.class, testRunner::run); } @Test @@ -84,9 +83,7 @@ public class TestAttributesToJSON { FlowFile ff = session.create(); testRunner.enqueue(ff); - assertThrows(AssertionError.class, () -> { - testRunner.run(); - }); + assertThrows(AssertionError.class, testRunner::run); } @Test @@ -104,14 +101,14 @@ public class TestAttributesToJSON { testRunner.run(); //Expecting success transition because Jackson is taking care of escaping the bad JSON characters - testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).get(0) + testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).getFirst() .assertAttributeExists(AttributesToJSON.JSON_ATTRIBUTE_NAME); 
testRunner.assertTransferCount(AttributesToJSON.REL_SUCCESS, 1); testRunner.assertTransferCount(AttributesToJSON.REL_FAILURE, 0); //Make sure that the value is a true JSON null for the non existing attribute String json = testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS) - .get(0).getAttribute(AttributesToJSON.JSON_ATTRIBUTE_NAME); + .getFirst().getAttribute(AttributesToJSON.JSON_ATTRIBUTE_NAME); Map val = MAPPER.readValue(json, HashMap.class); @@ -133,14 +130,14 @@ public class TestAttributesToJSON { testRunner.run(); //Expecting success transition because Jackson is taking care of escaping the bad JSON characters - testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).get(0) + testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).getFirst() .assertAttributeExists(AttributesToJSON.JSON_ATTRIBUTE_NAME); testRunner.assertTransferCount(AttributesToJSON.REL_SUCCESS, 1); testRunner.assertTransferCount(AttributesToJSON.REL_FAILURE, 0); //Make sure that the value is a true JSON null for the non existing attribute String json = testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS) - .get(0).getAttribute(AttributesToJSON.JSON_ATTRIBUTE_NAME); + .getFirst().getAttribute(AttributesToJSON.JSON_ATTRIBUTE_NAME); Map val = MAPPER.readValue(json, HashMap.class); @@ -148,7 +145,7 @@ public class TestAttributesToJSON { } @Test - public void testInvalidJSONValueInAttribute() throws Exception { + public void testInvalidJSONValueInAttribute() { final TestRunner testRunner = TestRunners.newTestRunner(new AttributesToJSON()); testRunner.setProperty(AttributesToJSON.DESTINATION, AttributesToJSON.DESTINATION_ATTRIBUTE); @@ -162,7 +159,7 @@ public class TestAttributesToJSON { testRunner.run(); //Expecting success transition because Jackson is taking care of escaping the bad JSON characters - testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).get(0) + 
testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).getFirst() .assertAttributeExists(AttributesToJSON.JSON_ATTRIBUTE_NAME); testRunner.assertTransferCount(AttributesToJSON.REL_SUCCESS, 1); testRunner.assertTransferCount(AttributesToJSON.REL_FAILURE, 0); @@ -182,21 +179,21 @@ public class TestAttributesToJSON { testRunner.enqueue(ff); testRunner.run(); - testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).get(0) + testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).getFirst() .assertAttributeExists(AttributesToJSON.JSON_ATTRIBUTE_NAME); testRunner.assertTransferCount(AttributesToJSON.REL_SUCCESS, 1); testRunner.assertTransferCount(AttributesToJSON.REL_FAILURE, 0); String json = testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS) - .get(0).getAttribute(AttributesToJSON.JSON_ATTRIBUTE_NAME); + .getFirst().getAttribute(AttributesToJSON.JSON_ATTRIBUTE_NAME); Map val = MAPPER.readValue(json, HashMap.class); - assertTrue(val.get(TEST_ATTRIBUTE_KEY).equals(TEST_ATTRIBUTE_VALUE)); + assertEquals(TEST_ATTRIBUTE_VALUE, val.get(TEST_ATTRIBUTE_KEY)); } @Test - public void testContent_emptyListUserSpecifiedAttributes() throws Exception { + public void testContent_emptyListUserSpecifiedAttributes() { final TestRunner testRunner = TestRunners.newTestRunner(new AttributesToJSON()); testRunner.setProperty(AttributesToJSON.DESTINATION, AttributesToJSON.DESTINATION_CONTENT); testRunner.setProperty(AttributesToJSON.INCLUDE_CORE_ATTRIBUTES, "false"); @@ -207,11 +204,11 @@ public class TestAttributesToJSON { testRunner.enqueue(ff); testRunner.run(); - testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).get(0) + testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).getFirst() .assertAttributeNotExists(AttributesToJSON.JSON_ATTRIBUTE_NAME); testRunner.assertTransferCount(AttributesToJSON.REL_SUCCESS, 1); testRunner.assertTransferCount(AttributesToJSON.REL_FAILURE, 0); - 
testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).get(0).assertContentEquals("{}"); + testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).getFirst().assertContentEquals("{}"); } @Test @@ -227,17 +224,17 @@ public class TestAttributesToJSON { testRunner.enqueue(ff); testRunner.run(); - testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).get(0) + testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).getFirst() .assertAttributeExists(AttributesToJSON.JSON_ATTRIBUTE_NAME); testRunner.assertTransferCount(AttributesToJSON.REL_SUCCESS, 1); testRunner.assertTransferCount(AttributesToJSON.REL_FAILURE, 0); String json = testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS) - .get(0).getAttribute(AttributesToJSON.JSON_ATTRIBUTE_NAME); + .getFirst().getAttribute(AttributesToJSON.JSON_ATTRIBUTE_NAME); Map val = MAPPER.readValue(json, HashMap.class); - assertTrue(val.get(TEST_ATTRIBUTE_KEY).equals(TEST_ATTRIBUTE_VALUE)); - assertTrue(val.size() == 1); + assertEquals(TEST_ATTRIBUTE_VALUE, val.get(TEST_ATTRIBUTE_KEY)); + assertEquals(1, val.size()); } @@ -254,17 +251,17 @@ public class TestAttributesToJSON { testRunner.enqueue(ff); testRunner.run(); - testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).get(0) + testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).getFirst() .assertAttributeExists(AttributesToJSON.JSON_ATTRIBUTE_NAME); testRunner.assertTransferCount(AttributesToJSON.REL_SUCCESS, 1); testRunner.assertTransferCount(AttributesToJSON.REL_FAILURE, 0); String json = testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS) - .get(0).getAttribute(AttributesToJSON.JSON_ATTRIBUTE_NAME); + .getFirst().getAttribute(AttributesToJSON.JSON_ATTRIBUTE_NAME); Map val = MAPPER.readValue(json, HashMap.class); - assertTrue(val.get(TEST_ATTRIBUTE_KEY).equals(TEST_ATTRIBUTE_VALUE)); - assertTrue(val.size() == 1); + assertEquals(TEST_ATTRIBUTE_VALUE, 
val.get(TEST_ATTRIBUTE_KEY)); + assertEquals(1, val.size()); } @@ -281,19 +278,19 @@ public class TestAttributesToJSON { testRunner.enqueue(ff); testRunner.run(); - testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).get(0) + testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).getFirst() .assertAttributeExists(AttributesToJSON.JSON_ATTRIBUTE_NAME); testRunner.assertTransferCount(AttributesToJSON.REL_SUCCESS, 1); testRunner.assertTransferCount(AttributesToJSON.REL_FAILURE, 0); String json = testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS) - .get(0).getAttribute(AttributesToJSON.JSON_ATTRIBUTE_NAME); + .getFirst().getAttribute(AttributesToJSON.JSON_ATTRIBUTE_NAME); Map val = MAPPER.readValue(json, HashMap.class); //If a Attribute is requested but does not exist then it is placed in the JSON with an empty string - assertTrue(val.get("NonExistingAttribute").equals("")); - assertTrue(val.size() == 1); + assertEquals("", val.get("NonExistingAttribute")); + assertEquals(1, val.size()); } @Test @@ -310,13 +307,13 @@ public class TestAttributesToJSON { testRunner.enqueue(ff); testRunner.run(); - testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).get(0) + testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).getFirst() .assertAttributeExists(AttributesToJSON.JSON_ATTRIBUTE_NAME); testRunner.assertTransferCount(AttributesToJSON.REL_SUCCESS, 1); testRunner.assertTransferCount(AttributesToJSON.REL_FAILURE, 0); String json = testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS) - .get(0).getAttribute(AttributesToJSON.JSON_ATTRIBUTE_NAME); + .getFirst().getAttribute(AttributesToJSON.JSON_ATTRIBUTE_NAME); Map val = MAPPER.readValue(json, HashMap.class); assertEquals(TEST_ATTRIBUTE_VALUE, val.get(TEST_ATTRIBUTE_KEY)); @@ -338,13 +335,13 @@ public class TestAttributesToJSON { testRunner.enqueue(ff); testRunner.run(); - 
testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).get(0) + testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).getFirst() .assertAttributeExists(AttributesToJSON.JSON_ATTRIBUTE_NAME); testRunner.assertTransferCount(AttributesToJSON.REL_SUCCESS, 1); testRunner.assertTransferCount(AttributesToJSON.REL_FAILURE, 0); String json = testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS) - .get(0).getAttribute(AttributesToJSON.JSON_ATTRIBUTE_NAME); + .getFirst().getAttribute(AttributesToJSON.JSON_ATTRIBUTE_NAME); Map val = MAPPER.readValue(json, HashMap.class); assertEquals(TEST_ATTRIBUTE_VALUE, val.get(CoreAttributes.PATH.key())); @@ -368,7 +365,7 @@ public class TestAttributesToJSON { testRunner.assertTransferCount(AttributesToJSON.REL_SUCCESS, 1); testRunner.assertTransferCount(AttributesToJSON.REL_FAILURE, 0); - Map val = MAPPER.readValue(testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).get(0).toByteArray(), HashMap.class); + Map val = MAPPER.readValue(testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).getFirst().toByteArray(), HashMap.class); assertEquals(TEST_ATTRIBUTE_VALUE, val.get(TEST_ATTRIBUTE_KEY)); assertEquals(1, val.size()); } @@ -390,7 +387,7 @@ public class TestAttributesToJSON { testRunner.assertTransferCount(AttributesToJSON.REL_FAILURE, 0); testRunner.assertTransferCount(AttributesToJSON.REL_SUCCESS, 1); - MockFlowFile flowFile = flowFilesForRelationship.get(0); + MockFlowFile flowFile = flowFilesForRelationship.getFirst(); assertEquals(AttributesToJSON.APPLICATION_JSON, flowFile.getAttribute(CoreAttributes.MIME_TYPE.key())); @@ -416,7 +413,7 @@ public class TestAttributesToJSON { testRunner.assertTransferCount(AttributesToJSON.REL_FAILURE, 0); testRunner.assertTransferCount(AttributesToJSON.REL_SUCCESS, 1); - MockFlowFile flowFile = flowFilesForRelationship.get(0); + MockFlowFile flowFile = flowFilesForRelationship.getFirst(); 
assertNull(flowFile.getAttribute(CoreAttributes.MIME_TYPE.key())); @@ -433,7 +430,7 @@ public class TestAttributesToJSON { testRunner.setProperty(AttributesToJSON.ATTRIBUTES_REGEX, "${regex}"); testRunner.setProperty(AttributesToJSON.ATTRIBUTES_LIST, "test, test1"); - Map attributes = new HashMap(); + Map attributes = new HashMap<>(); attributes.put("delimited.header.column.1", "Registry"); attributes.put("delimited.header.column.2", "Assignment"); attributes.put("delimited.header.column.3", "Organization Name"); @@ -448,16 +445,16 @@ public class TestAttributesToJSON { testRunner.assertTransferCount(AttributesToJSON.REL_FAILURE, 0); testRunner.assertTransferCount(AttributesToJSON.REL_SUCCESS, 1); - MockFlowFile flowFile = testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).get(0); + MockFlowFile flowFile = testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).getFirst(); Map val = MAPPER.readValue(flowFile.getAttribute(AttributesToJSON.JSON_ATTRIBUTE_NAME), HashMap.class); - assertTrue(val.keySet().contains("delimited.header.column.1")); - assertTrue(val.keySet().contains("delimited.header.column.2")); - assertTrue(val.keySet().contains("delimited.header.column.3")); - assertTrue(val.keySet().contains("delimited.header.column.4")); - assertTrue(!val.keySet().contains("delimited.footer.column.1")); - assertTrue(val.keySet().contains("test")); - assertTrue(val.keySet().contains("test1")); + assertTrue(val.containsKey("delimited.header.column.1")); + assertTrue(val.containsKey("delimited.header.column.2")); + assertTrue(val.containsKey("delimited.header.column.3")); + assertTrue(val.containsKey("delimited.header.column.4")); + assertFalse(val.containsKey("delimited.footer.column.1")); + assertTrue(val.containsKey("test")); + assertTrue(val.containsKey("test1")); } @ParameterizedTest @@ -465,8 +462,7 @@ public class TestAttributesToJSON { public void testAttributeWithNestedJsonOutputAsJsonInContent(String nestedJson, Class 
expectedClass) throws IOException { final TestRunner testRunner = TestRunners.newTestRunner(new AttributesToJSON()); testRunner.setProperty(AttributesToJSON.DESTINATION, AttributesToJSON.DESTINATION_CONTENT); - testRunner.setProperty(AttributesToJSON.JSON_HANDLING_STRATEGY, - AttributesToJSON.JsonHandlingStrategy.NESTED.getValue()); + testRunner.setProperty(AttributesToJSON.JSON_HANDLING_STRATEGY, AttributesToJSON.JsonHandlingStrategy.NESTED); ProcessSession session = testRunner.getProcessSessionFactory().createSession(); FlowFile ff = session.create(); @@ -478,7 +474,7 @@ public class TestAttributesToJSON { testRunner.assertTransferCount(AttributesToJSON.REL_SUCCESS, 1); List flowFilesForRelationship = testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS); - MockFlowFile flowFile = flowFilesForRelationship.get(0); + MockFlowFile flowFile = flowFilesForRelationship.getFirst(); assertEquals(AttributesToJSON.APPLICATION_JSON, flowFile.getAttribute(CoreAttributes.MIME_TYPE.key())); Map val = MAPPER.readValue(flowFile.toByteArray(), Map.class); assertInstanceOf(expectedClass, val.get(TEST_ATTRIBUTE_KEY)); @@ -495,8 +491,7 @@ public class TestAttributesToJSON { public void testAttributeWithNestedJsonOutputAsJsonInAttribute(String nestedJson, Class expectedClass) throws IOException { final TestRunner testRunner = TestRunners.newTestRunner(new AttributesToJSON()); testRunner.setProperty(AttributesToJSON.DESTINATION, AttributesToJSON.DESTINATION_ATTRIBUTE); - testRunner.setProperty(AttributesToJSON.JSON_HANDLING_STRATEGY, - AttributesToJSON.JsonHandlingStrategy.NESTED.getValue()); + testRunner.setProperty(AttributesToJSON.JSON_HANDLING_STRATEGY, AttributesToJSON.JsonHandlingStrategy.NESTED); ProcessSession session = testRunner.getProcessSessionFactory().createSession(); FlowFile ff = session.create(); @@ -506,11 +501,11 @@ public class TestAttributesToJSON { testRunner.assertTransferCount(AttributesToJSON.REL_FAILURE, 0); 
testRunner.assertTransferCount(AttributesToJSON.REL_SUCCESS, 1); - testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).get(0) + testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).getFirst() .assertAttributeExists(AttributesToJSON.JSON_ATTRIBUTE_NAME); String json = testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS) - .get(0).getAttribute(AttributesToJSON.JSON_ATTRIBUTE_NAME); + .getFirst().getAttribute(AttributesToJSON.JSON_ATTRIBUTE_NAME); Map val = MAPPER.readValue(json, Map.class); assertInstanceOf(expectedClass, val.get(TEST_ATTRIBUTE_KEY)); } @@ -520,8 +515,7 @@ public class TestAttributesToJSON { public void testAttributesWithLookALikeJson(String lookAlikeJson) { final TestRunner testRunner = TestRunners.newTestRunner(new AttributesToJSON()); testRunner.setProperty(AttributesToJSON.DESTINATION, AttributesToJSON.DESTINATION_CONTENT); - testRunner.setProperty(AttributesToJSON.JSON_HANDLING_STRATEGY, - AttributesToJSON.JsonHandlingStrategy.NESTED.getValue()); + testRunner.setProperty(AttributesToJSON.JSON_HANDLING_STRATEGY, AttributesToJSON.JsonHandlingStrategy.NESTED); ProcessSession session = testRunner.getProcessSessionFactory().createSession(); FlowFile ff = session.create(); @@ -532,7 +526,7 @@ public class TestAttributesToJSON { testRunner.assertTransferCount(AttributesToJSON.REL_FAILURE, 1); testRunner.assertTransferCount(AttributesToJSON.REL_SUCCESS, 0); MockComponentLog logger = testRunner.getLogger(); - assertTrue(logger.getErrorMessages().get(0).getMsg().contains("expecting")); + assertTrue(logger.getErrorMessages().getFirst().getMsg().contains("expecting")); } @ParameterizedTest @@ -549,11 +543,11 @@ public class TestAttributesToJSON { testRunner.assertTransferCount(AttributesToJSON.REL_FAILURE, 0); testRunner.assertTransferCount(AttributesToJSON.REL_SUCCESS, 1); - testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).get(0) + 
testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).getFirst() .assertAttributeExists(AttributesToJSON.JSON_ATTRIBUTE_NAME); String json = testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS) - .get(0).getAttribute(AttributesToJSON.JSON_ATTRIBUTE_NAME); + .getFirst().getAttribute(AttributesToJSON.JSON_ATTRIBUTE_NAME); Map val = MAPPER.readValue(json, Map.class); assertInstanceOf(String.class, val.get(TEST_ATTRIBUTE_KEY)); } @@ -569,8 +563,7 @@ public class TestAttributesToJSON { final TestRunner testRunner = TestRunners.newTestRunner(new AttributesToJSON()); testRunner.setProperty(AttributesToJSON.DESTINATION, AttributesToJSON.DESTINATION_CONTENT); - testRunner.setProperty(AttributesToJSON.JSON_HANDLING_STRATEGY, - AttributesToJSON.JsonHandlingStrategy.NESTED.getValue()); + testRunner.setProperty(AttributesToJSON.JSON_HANDLING_STRATEGY, AttributesToJSON.JsonHandlingStrategy.NESTED); ProcessSession session = testRunner.getProcessSessionFactory().createSession(); FlowFile ff = session.create(); @@ -580,7 +573,7 @@ public class TestAttributesToJSON { testRunner.assertTransferCount(AttributesToJSON.REL_FAILURE, 0); testRunner.assertTransferCount(AttributesToJSON.REL_SUCCESS, 1); - MockFlowFile result = testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).get(0); + MockFlowFile result = testRunner.getFlowFilesForRelationship(AttributesToJSON.REL_SUCCESS).getFirst(); Map attributes = MAPPER.readValue(result.getContent(), Map.class); assertInstanceOf(Map.class, attributes.get(TEST_ATTRIBUTE_KEY)); } diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestFetchDistributedMapCache.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestFetchDistributedMapCache.java index ce4861b0a9..96798c28c6 100644 --- 
a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestFetchDistributedMapCache.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestFetchDistributedMapCache.java @@ -44,13 +44,13 @@ public class TestFetchDistributedMapCache { service = new MockCacheClient(); runner.addControllerService("service", service); runner.enableControllerService(service); - runner.setProperty(FetchDistributedMapCache.PROP_DISTRIBUTED_CACHE_SERVICE, "service"); + runner.setProperty(FetchDistributedMapCache.DISTRIBUTED_CACHE_SERVICE, "service"); } @Test public void testNoCacheKey() { - runner.setProperty(FetchDistributedMapCache.PROP_CACHE_ENTRY_IDENTIFIER, "${cacheKeyAttribute}"); + runner.setProperty(FetchDistributedMapCache.CACHE_ENTRY_IDENTIFIER, "${cacheKeyAttribute}"); final Map props = new HashMap<>(); props.put("cacheKeyAttribute", "1"); @@ -66,7 +66,7 @@ public class TestFetchDistributedMapCache { @Test public void testNoCacheKeyValue() { - runner.setProperty(FetchDistributedMapCache.PROP_CACHE_ENTRY_IDENTIFIER, "${cacheKeyAttribute}"); + runner.setProperty(FetchDistributedMapCache.CACHE_ENTRY_IDENTIFIER, "${cacheKeyAttribute}"); runner.enqueue(new byte[] {}); runner.run(); @@ -79,7 +79,7 @@ public class TestFetchDistributedMapCache { @Test public void testFailingCacheService() { service.setFailOnCalls(true); - runner.setProperty(FetchDistributedMapCache.PROP_CACHE_ENTRY_IDENTIFIER, "${cacheKeyAttribute}"); + runner.setProperty(FetchDistributedMapCache.CACHE_ENTRY_IDENTIFIER, "${cacheKeyAttribute}"); final Map props = new HashMap<>(); props.put("cacheKeyAttribute", "2"); @@ -96,7 +96,7 @@ public class TestFetchDistributedMapCache { @Test public void testSingleFlowFile() throws IOException { service.put("key", "value", new FetchDistributedMapCache.StringSerializer(), new FetchDistributedMapCache.StringSerializer()); - 
runner.setProperty(FetchDistributedMapCache.PROP_CACHE_ENTRY_IDENTIFIER, "${cacheKeyAttribute}"); + runner.setProperty(FetchDistributedMapCache.CACHE_ENTRY_IDENTIFIER, "${cacheKeyAttribute}"); final Map props = new HashMap<>(); props.put("cacheKeyAttribute", "key"); @@ -117,8 +117,8 @@ public class TestFetchDistributedMapCache { @Test public void testSingleFlowFileToAttribute() throws IOException { service.put("key", "value", new FetchDistributedMapCache.StringSerializer(), new FetchDistributedMapCache.StringSerializer()); - runner.setProperty(FetchDistributedMapCache.PROP_CACHE_ENTRY_IDENTIFIER, "${cacheKeyAttribute}"); - runner.setProperty(FetchDistributedMapCache.PROP_PUT_CACHE_VALUE_IN_ATTRIBUTE, "test"); + runner.setProperty(FetchDistributedMapCache.CACHE_ENTRY_IDENTIFIER, "${cacheKeyAttribute}"); + runner.setProperty(FetchDistributedMapCache.PUT_CACHE_VALUE_IN_ATTRIBUTE, "test"); final Map props = new HashMap<>(); props.put("cacheKeyAttribute", "key"); @@ -139,9 +139,9 @@ public class TestFetchDistributedMapCache { @Test public void testToAttributeTooLong() throws IOException { service.put("key", "value", new FetchDistributedMapCache.StringSerializer(), new FetchDistributedMapCache.StringSerializer()); - runner.setProperty(FetchDistributedMapCache.PROP_CACHE_ENTRY_IDENTIFIER, "${cacheKeyAttribute}"); - runner.setProperty(FetchDistributedMapCache.PROP_PUT_CACHE_VALUE_IN_ATTRIBUTE, "test"); - runner.setProperty(FetchDistributedMapCache.PROP_PUT_ATTRIBUTE_MAX_LENGTH, "3"); + runner.setProperty(FetchDistributedMapCache.CACHE_ENTRY_IDENTIFIER, "${cacheKeyAttribute}"); + runner.setProperty(FetchDistributedMapCache.PUT_CACHE_VALUE_IN_ATTRIBUTE, "test"); + runner.setProperty(FetchDistributedMapCache.PUT_ATTRIBUTE_MAX_LENGTH, "3"); final Map props = new HashMap<>(); props.put("cacheKeyAttribute", "key"); @@ -162,10 +162,10 @@ public class TestFetchDistributedMapCache { public void testMultipleKeysToAttributes() throws IOException { service.put("key1", "value1", new 
FetchDistributedMapCache.StringSerializer(), new FetchDistributedMapCache.StringSerializer()); service.put("key2", "value2", new FetchDistributedMapCache.StringSerializer(), new FetchDistributedMapCache.StringSerializer()); - runner.setProperty(FetchDistributedMapCache.PROP_CACHE_ENTRY_IDENTIFIER, "key1, key2"); + runner.setProperty(FetchDistributedMapCache.CACHE_ENTRY_IDENTIFIER, "key1, key2"); // Not valid to set multiple keys without Put Cache Value In Attribute set runner.assertNotValid(); - runner.setProperty(FetchDistributedMapCache.PROP_PUT_CACHE_VALUE_IN_ATTRIBUTE, "test"); + runner.setProperty(FetchDistributedMapCache.PUT_CACHE_VALUE_IN_ATTRIBUTE, "test"); runner.assertValid(); final Map props = new HashMap<>(); @@ -184,10 +184,10 @@ public class TestFetchDistributedMapCache { @Test public void testMultipleKeysOneNotFound() throws IOException { service.put("key1", "value1", new FetchDistributedMapCache.StringSerializer(), new FetchDistributedMapCache.StringSerializer()); - runner.setProperty(FetchDistributedMapCache.PROP_CACHE_ENTRY_IDENTIFIER, "key1, key2"); + runner.setProperty(FetchDistributedMapCache.CACHE_ENTRY_IDENTIFIER, "key1, key2"); // Not valid to set multiple keys without Put Cache Value In Attribute set runner.assertNotValid(); - runner.setProperty(FetchDistributedMapCache.PROP_PUT_CACHE_VALUE_IN_ATTRIBUTE, "test"); + runner.setProperty(FetchDistributedMapCache.PUT_CACHE_VALUE_IN_ATTRIBUTE, "test"); runner.assertValid(); final Map props = new HashMap<>(); diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestFilterAttribute.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestFilterAttribute.java index 6d9b210374..cac18658c8 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestFilterAttribute.java +++ 
b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestFilterAttribute.java @@ -89,7 +89,7 @@ class TestFilterAttribute { @BeforeEach void setUp() { - runner.setProperty(FilterAttribute.FILTER_MODE, FilterAttribute.FilterMode.REMOVE.getValue()); + runner.setProperty(FilterAttribute.FILTER_MODE, FilterAttribute.FilterMode.REMOVE); } @Test @@ -198,7 +198,7 @@ class TestFilterAttribute { } private void runTestWith(Map attributes, String attributeSet, Set expectedAttributes) { - runner.setProperty(FilterAttribute.MATCHING_STRATEGY, FilterAttribute.MatchingStrategy.ENUMERATION.getValue()); + runner.setProperty(FilterAttribute.MATCHING_STRATEGY, FilterAttribute.MatchingStrategy.ENUMERATION); runner.setProperty(FilterAttribute.ATTRIBUTE_ENUMERATION, attributeSet); final MockFlowFile input = runner.enqueue(exampleContent, attributes); @@ -267,7 +267,7 @@ class TestFilterAttribute { @BeforeEach void setUp() { - runner.setProperty(FilterAttribute.FILTER_MODE, FilterAttribute.FilterMode.REMOVE.getValue()); + runner.setProperty(FilterAttribute.FILTER_MODE, FilterAttribute.FilterMode.REMOVE); } @Test @@ -320,7 +320,7 @@ class TestFilterAttribute { } private void runTestWith(Map attributes, String regexPattern, Set expectedAttributes) { - runner.setProperty(FilterAttribute.MATCHING_STRATEGY, FilterAttribute.MatchingStrategy.PATTERN.getValue()); + runner.setProperty(FilterAttribute.MATCHING_STRATEGY, FilterAttribute.MatchingStrategy.PATTERN); runner.setProperty(FilterAttribute.ATTRIBUTE_PATTERN, regexPattern); final MockFlowFile input = runner.enqueue(exampleContent, attributes); diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestLookupAttribute.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestLookupAttribute.java index 1ccb1a2af5..1cc5868711 
100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestLookupAttribute.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestLookupAttribute.java @@ -27,9 +27,7 @@ import org.apache.nifi.util.TestRunner; import org.apache.nifi.util.TestRunners; import org.junit.jupiter.api.Test; -import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.Map; import java.util.Optional; import java.util.Set; @@ -68,7 +66,7 @@ public class TestLookupAttribute { runner.run(1, false); runner.assertAllFlowFilesTransferred(LookupAttribute.REL_MATCHED, 1); - final MockFlowFile flowFile = runner.getFlowFilesForRelationship(LookupAttribute.REL_MATCHED).get(0); + final MockFlowFile flowFile = runner.getFlowFilesForRelationship(LookupAttribute.REL_MATCHED).getFirst(); assertNotNull(flowFile); @@ -109,7 +107,7 @@ public class TestLookupAttribute { runner.run(1, false); runner.assertAllFlowFilesTransferred(LookupAttribute.REL_UNMATCHED, 1); - final MockFlowFile flowFile = runner.getFlowFilesForRelationship(LookupAttribute.REL_UNMATCHED).get(0); + final MockFlowFile flowFile = runner.getFlowFilesForRelationship(LookupAttribute.REL_UNMATCHED).getFirst(); assertNotNull(flowFile); @@ -172,10 +170,7 @@ public class TestLookupAttribute { @Override public Set getRequiredKeys() { - final Set requiredKeys = new HashSet<>(); - requiredKeys.add("key1"); - requiredKeys.add("key2"); - return Collections.unmodifiableSet(requiredKeys); + return Set.of("key1", "key2"); } } @@ -191,7 +186,7 @@ public class TestLookupAttribute { } @Override - public Optional lookup(Map coordinates) throws LookupFailureException { + public Optional lookup(Map coordinates) { return Optional.empty(); } @@ -202,9 +197,7 @@ public class TestLookupAttribute { @Override public Set getRequiredKeys() { - Set set = new HashSet(); - 
set.add("key"); - return set; + return Set.of("key"); } } } diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestPutTCP.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestPutTCP.java index c046829504..f20db10aea 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestPutTCP.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestPutTCP.java @@ -244,7 +244,7 @@ public class TestPutTCP { runner.setProperty(PutTCP.HOSTNAME, TCP_SERVER_ADDRESS); runner.setProperty(PutTCP.PORT, String.valueOf(eventServer.getListeningPort())); - runner.setProperty(PutTCP.TRANSMISSION_STRATEGY, TransmissionStrategy.RECORD_ORIENTED.getValue()); + runner.setProperty(PutTCP.TRANSMISSION_STRATEGY, TransmissionStrategy.RECORD_ORIENTED); when(writerFactory.getIdentifier()).thenReturn(WRITER_SERVICE_ID); runner.addControllerService(WRITER_SERVICE_ID, writerFactory); diff --git a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestValidateJson.java b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestValidateJson.java index c5650a9885..b9d6810f19 100644 --- a/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestValidateJson.java +++ b/nifi-extension-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/TestValidateJson.java @@ -18,13 +18,13 @@ package org.apache.nifi.processors.standard; import org.apache.commons.io.IOUtils; import org.apache.nifi.controller.AbstractControllerService; +import 
org.apache.nifi.json.schema.JsonSchema; +import org.apache.nifi.json.schema.SchemaVersion; import org.apache.nifi.processor.Relationship; import org.apache.nifi.provenance.ProvenanceEventType; import org.apache.nifi.reporting.InitializationException; -import org.apache.nifi.json.schema.JsonSchema; import org.apache.nifi.schema.access.JsonSchemaRegistryComponent; import org.apache.nifi.schema.access.SchemaNotFoundException; -import org.apache.nifi.json.schema.SchemaVersion; import org.apache.nifi.schemaregistry.services.JsonSchemaRegistry; import org.apache.nifi.util.TestRunner; import org.apache.nifi.util.TestRunners; @@ -64,7 +64,7 @@ class TestValidateJson { @ParameterizedTest(name = "{2}") @MethodSource("customValidateArgs") - void testCustomValidateMissingProperty(final String strategy) { + void testCustomValidateMissingProperty(final ValidateJson.JsonSchemaStrategy strategy) { runner.setProperty(ValidateJson.SCHEMA_ACCESS_STRATEGY, strategy); runner.enqueue(JSON); @@ -87,7 +87,7 @@ class TestValidateJson { assertValidationErrors(ValidateJson.REL_VALID, false); assertEquals(1, runner.getProvenanceEvents().size()); - assertEquals(ProvenanceEventType.ROUTE, runner.getProvenanceEvents().get(0).getEventType()); + assertEquals(ProvenanceEventType.ROUTE, runner.getProvenanceEvents().getFirst().getEventType()); } @Test @@ -105,7 +105,7 @@ class TestValidateJson { assertValidationErrors(ValidateJson.REL_VALID, false); assertEquals(1, runner.getProvenanceEvents().size()); - assertEquals(ProvenanceEventType.ROUTE, runner.getProvenanceEvents().get(0).getEventType()); + assertEquals(ProvenanceEventType.ROUTE, runner.getProvenanceEvents().getFirst().getEventType()); } @Test @@ -122,7 +122,7 @@ class TestValidateJson { assertValidationErrors(ValidateJson.REL_VALID, false); assertEquals(1, runner.getProvenanceEvents().size()); - assertEquals(ProvenanceEventType.ROUTE, runner.getProvenanceEvents().get(0).getEventType()); + assertEquals(ProvenanceEventType.ROUTE, 
runner.getProvenanceEvents().getFirst().getEventType()); } @Test @@ -140,7 +140,7 @@ class TestValidateJson { assertValidationErrors(ValidateJson.REL_VALID, false); assertEquals(1, runner.getProvenanceEvents().size()); - assertEquals(ProvenanceEventType.ROUTE, runner.getProvenanceEvents().get(0).getEventType()); + assertEquals(ProvenanceEventType.ROUTE, runner.getProvenanceEvents().getFirst().getEventType()); } @Test @@ -158,7 +158,7 @@ class TestValidateJson { assertValidationErrors(ValidateJson.REL_INVALID, true); assertEquals(1, runner.getProvenanceEvents().size()); - assertEquals(ProvenanceEventType.ROUTE, runner.getProvenanceEvents().get(0).getEventType()); + assertEquals(ProvenanceEventType.ROUTE, runner.getProvenanceEvents().getFirst().getEventType()); } @Test @@ -176,7 +176,7 @@ class TestValidateJson { assertValidationErrors(ValidateJson.REL_INVALID, true); assertEquals(1, runner.getProvenanceEvents().size()); - assertEquals(ProvenanceEventType.ROUTE, runner.getProvenanceEvents().get(0).getEventType()); + assertEquals(ProvenanceEventType.ROUTE, runner.getProvenanceEvents().getFirst().getEventType()); } @Test @@ -193,7 +193,7 @@ class TestValidateJson { assertValidationErrors(ValidateJson.REL_FAILURE, false); assertEquals(1, runner.getProvenanceEvents().size()); - assertEquals(ProvenanceEventType.ROUTE, runner.getProvenanceEvents().get(0).getEventType()); + assertEquals(ProvenanceEventType.ROUTE, runner.getProvenanceEvents().getFirst().getEventType()); } @Test @@ -239,7 +239,7 @@ class TestValidateJson { runner.addControllerService(registryIdentifier, validJsonSchemaRegistry); runner.enableControllerService(validJsonSchemaRegistry); runner.assertValid(validJsonSchemaRegistry); - runner.setProperty(ValidateJson.SCHEMA_ACCESS_STRATEGY, ValidateJson.JsonSchemaStrategy.SCHEMA_NAME_PROPERTY.getValue()); + runner.setProperty(ValidateJson.SCHEMA_ACCESS_STRATEGY, ValidateJson.JsonSchemaStrategy.SCHEMA_NAME_PROPERTY); runner.setProperty(ValidateJson.SCHEMA_REGISTRY, 
registryIdentifier); Map attributes = new HashMap<>(); @@ -253,7 +253,7 @@ class TestValidateJson { } private void assertValidationErrors(Relationship relationship, boolean expected) { - final Map attributes = runner.getFlowFilesForRelationship(relationship).get(0).getAttributes(); + final Map attributes = runner.getFlowFilesForRelationship(relationship).getFirst().getAttributes(); if (expected) { // JSON library supports English and French validation output. Validate existence of message rather than value. @@ -265,8 +265,8 @@ class TestValidateJson { private static Stream customValidateArgs() { return Stream.of( - Arguments.of(ValidateJson.JsonSchemaStrategy.SCHEMA_NAME_PROPERTY.getValue(), "requires that the JSON Schema Registry property be set"), - Arguments.of(ValidateJson.JsonSchemaStrategy.SCHEMA_CONTENT_PROPERTY.getValue(), "requires that the JSON Schema property be set") + Arguments.of(ValidateJson.JsonSchemaStrategy.SCHEMA_NAME_PROPERTY, "requires that the JSON Schema Registry property be set"), + Arguments.of(ValidateJson.JsonSchemaStrategy.SCHEMA_CONTENT_PROPERTY, "requires that the JSON Schema property be set") ); }