mirror of https://github.com/apache/nifi.git

NIFI-13590 Refactored Standard Processors using current API methods

This closes #9119

Signed-off-by: David Handermann <exceptionfactory@apache.org>

parent 83d2078a4f
commit 458844909b
@@ -50,7 +50,6 @@ import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;

@@ -67,18 +66,18 @@ import static java.sql.Types.BINARY;
import static java.sql.Types.BIT;
import static java.sql.Types.BLOB;
import static java.sql.Types.BOOLEAN;
import static java.sql.Types.CHAR;
import static java.sql.Types.CLOB;
import static java.sql.Types.DATE;
import static java.sql.Types.DECIMAL;
import static java.sql.Types.DOUBLE;
import static java.sql.Types.FLOAT;
import static java.sql.Types.INTEGER;
import static java.sql.Types.LONGVARBINARY;
import static java.sql.Types.NUMERIC;
import static java.sql.Types.CHAR;
import static java.sql.Types.DATE;
import static java.sql.Types.LONGNVARCHAR;
import static java.sql.Types.LONGVARBINARY;
import static java.sql.Types.LONGVARCHAR;
import static java.sql.Types.NCHAR;
import static java.sql.Types.NUMERIC;
import static java.sql.Types.NVARCHAR;
import static java.sql.Types.REAL;
import static java.sql.Types.ROWID;

@@ -283,10 +282,10 @@ public abstract class AbstractDatabaseFetchProcessor extends AbstractSessionFact
columnTypeMap.clear();
}

final List<String> maxValueColumnNameList = Arrays.asList(maxValueColumnNames.toLowerCase().split(","));
final String[] maxValueColumnNameList = maxValueColumnNames.toLowerCase().split(",");
final List<String> maxValueQualifiedColumnNameList = new ArrayList<>();

for (String maxValueColumn:maxValueColumnNameList) {
for (String maxValueColumn : maxValueColumnNameList) {
String colKey = getStateKey(tableName, maxValueColumn.trim(), dbAdapter);
maxValueQualifiedColumnNameList.add(colKey);
}

@@ -304,7 +303,7 @@ public abstract class AbstractDatabaseFetchProcessor extends AbstractSessionFact
columnTypeMap.putIfAbsent(colKey, colType);
}

for (String maxValueColumn:maxValueColumnNameList) {
for (String maxValueColumn : maxValueColumnNameList) {
String colKey = getStateKey(tableName, maxValueColumn.trim().toLowerCase(), dbAdapter);
if (!columnTypeMap.containsKey(colKey)) {
throw new ProcessException("Column not found in the table/query specified: " + maxValueColumn);
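The fetch-processor hunk above drops the Arrays.asList wrapper and iterates the String[] that split() already returns, since the enhanced for loop works on arrays directly. A minimal sketch of the idiom, with made-up column names:

    // split() already yields a String[]; no List wrapper is needed just to iterate
    final String[] columns = "id, updated_at".toLowerCase().split(",");
    for (final String column : columns) {
        System.out.println(column.trim());  // trim() drops the space after each comma
    }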
@@ -517,13 +517,13 @@ public abstract class AbstractExecuteSQL extends AbstractProcessor {
* Extract list of queries from config property
*/
protected List<String> getQueries(final String value) {
if (value == null || value.length() == 0 || value.trim().length() == 0) {
if (value == null || value.isEmpty() || value.isBlank()) {
return null;
}
final List<String> queries = new LinkedList<>();
for (String query : value.split("(?<!\\\\);")) {
query = query.replaceAll("\\\\;", ";");
if (query.trim().length() > 0) {
if (!query.isBlank()) {
queries.add(query.trim());
}
}
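The getQueries() change swaps the pre-Java 11 blank test value.trim().length() == 0 for String.isBlank(). Note that isBlank() already returns true for the empty string, so the isEmpty() clause in the new condition is redundant but harmless. A standalone sketch:

    // Java 11+: isBlank() is true for empty or whitespace-only strings
    String value = "   ";
    boolean oldStyle = value.trim().length() == 0;  // allocates a trimmed copy first
    boolean newStyle = value.isBlank();             // no allocation, same result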
@@ -38,7 +38,6 @@ import org.apache.nifi.util.StringUtils;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;

@@ -53,15 +52,13 @@ import java.util.concurrent.atomic.AtomicReference;
*/
public abstract class AbstractJsonPathProcessor extends AbstractProcessor {

static final Map<String, String> NULL_REPRESENTATION_MAP = new HashMap<>();

static final String EMPTY_STRING_OPTION = "empty string";
static final String NULL_STRING_OPTION = "the string 'null'";

static {
NULL_REPRESENTATION_MAP.put(EMPTY_STRING_OPTION, "");
NULL_REPRESENTATION_MAP.put(NULL_STRING_OPTION, "null");
}
static final Map<String, String> NULL_REPRESENTATION_MAP = Map.of(
EMPTY_STRING_OPTION, "",
NULL_STRING_OPTION, "null"
);

public static final PropertyDescriptor NULL_VALUE_DEFAULT_REPRESENTATION = new PropertyDescriptor.Builder()
.name("Null Value Representation")
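Map.of (Java 9+) builds the immutable map in a single expression, removing the mutable HashMap and its static initializer. A condensed before/after sketch using the option strings from the hunk:

    // Before: mutable map populated in a static initializer
    static final Map<String, String> BY_HAND = new HashMap<>();
    static {
        BY_HAND.put("empty string", "");
        BY_HAND.put("the string 'null'", "null");
    }

    // After: immutable, and it rejects null keys/values and duplicate keys at creation
    static final Map<String, String> DECLARATIVE = Map.of(
            "empty string", "",
            "the string 'null'", "null"
    );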
@@ -69,23 +69,23 @@ public abstract class AbstractQueryDatabaseTable extends AbstractDatabaseFetchPr
public static final String RESULT_TABLENAME = "tablename";
public static final String RESULT_ROW_COUNT = "querydbtable.row.count";

private static AllowableValue TRANSACTION_READ_COMMITTED = new AllowableValue(
private static final AllowableValue TRANSACTION_READ_COMMITTED = new AllowableValue(
String.valueOf(Connection.TRANSACTION_READ_COMMITTED),
"TRANSACTION_READ_COMMITTED"
);
private static AllowableValue TRANSACTION_READ_UNCOMMITTED = new AllowableValue(
private static final AllowableValue TRANSACTION_READ_UNCOMMITTED = new AllowableValue(
String.valueOf(Connection.TRANSACTION_READ_UNCOMMITTED),
"TRANSACTION_READ_UNCOMMITTED"
);
private static AllowableValue TRANSACTION_REPEATABLE_READ = new AllowableValue(
private static final AllowableValue TRANSACTION_REPEATABLE_READ = new AllowableValue(
String.valueOf(Connection.TRANSACTION_REPEATABLE_READ),
"TRANSACTION_REPEATABLE_READ"
);
private static AllowableValue TRANSACTION_NONE = new AllowableValue(
private static final AllowableValue TRANSACTION_NONE = new AllowableValue(
String.valueOf(Connection.TRANSACTION_NONE),
"TRANSACTION_NONE"
);
private static AllowableValue TRANSACTION_SERIALIZABLE = new AllowableValue(
private static final AllowableValue TRANSACTION_SERIALIZABLE = new AllowableValue(
String.valueOf(Connection.TRANSACTION_SERIALIZABLE),
"TRANSACTION_SERIALIZABLE"
);
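The only change in this hunk is adding final: a field that is static but not final can be reassigned from anywhere in the class and is not safely published as a constant. A one-line sketch of the resulting declaration:

    // static final: a single shared instance, and reassignment is a compile error
    private static final AllowableValue TRANSACTION_NONE = new AllowableValue(
            String.valueOf(java.sql.Connection.TRANSACTION_NONE),
            "TRANSACTION_NONE"
    );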
@@ -17,13 +17,13 @@

package org.apache.nifi.processors.standard;

import org.apache.commons.text.StringEscapeUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.annotation.behavior.WritesAttribute;
import org.apache.nifi.annotation.behavior.WritesAttributes;
import org.apache.commons.text.StringEscapeUtils;
import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.SideEffectFree;
import org.apache.nifi.annotation.behavior.SupportsBatching;
import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.WritesAttribute;
import org.apache.nifi.annotation.behavior.WritesAttributes;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnScheduled;

@@ -34,23 +34,19 @@ import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.flowfile.attributes.CoreAttributes;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;

import java.util.Map;
import java.util.Set;
import java.util.HashSet;
import java.util.List;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.Collections;
import java.util.Arrays;
import java.util.ArrayList;

@SideEffectFree
@SupportsBatching

@@ -150,13 +146,25 @@ public class AttributesToCSV extends AbstractProcessor {
.defaultValue("false")
.build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(
ATTRIBUTES_LIST,
ATTRIBUTES_REGEX,
DESTINATION,
INCLUDE_CORE_ATTRIBUTES,
NULL_VALUE_FOR_EMPTY_STRING,
INCLUDE_SCHEMA
);

public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success")
.description("Successfully converted attributes to CSV").build();
public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure")
.description("Failed to convert attributes to CSV").build();

private List<PropertyDescriptor> properties;
private Set<Relationship> relationships;
private static final Set<Relationship> RELATIONSHIPS = Set.of(
REL_SUCCESS,
REL_FAILURE
);

private volatile Boolean includeCoreAttributes;
private volatile Set<String> coreAttributes;
private volatile boolean destinationContent;

@@ -164,31 +172,14 @@ public class AttributesToCSV extends AbstractProcessor {
private volatile Pattern pattern;
private volatile Boolean includeSchema;

@Override
protected void init(final ProcessorInitializationContext context) {
final List<PropertyDescriptor> properties = new ArrayList<>();
properties.add(ATTRIBUTES_LIST);
properties.add(ATTRIBUTES_REGEX);
properties.add(DESTINATION);
properties.add(INCLUDE_CORE_ATTRIBUTES);
properties.add(NULL_VALUE_FOR_EMPTY_STRING);
properties.add(INCLUDE_SCHEMA);
this.properties = Collections.unmodifiableList(properties);

final Set<Relationship> relationships = new HashSet<>();
relationships.add(REL_SUCCESS);
relationships.add(REL_FAILURE);
this.relationships = Collections.unmodifiableSet(relationships);
}

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return properties;
return PROPERTIES;
}

@Override
public Set<Relationship> getRelationships() {
return relationships;
return RELATIONSHIPS;
}

@@ -311,7 +302,7 @@ public class AttributesToCSV extends AbstractProcessor {
if (destinationContent) {
FlowFile conFlowfile = session.write(original, (in, out) -> {
if (includeSchema) {
sbNames.append(System.getProperty("line.separator"));
sbNames.append(System.lineSeparator());
out.write(sbNames.toString().getBytes());
}
out.write(sbValues.toString().getBytes());
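This file shows the pattern the commit applies across the standard processors: per-instance collections assembled in init() and wrapped with Collections.unmodifiable* become static final constants built with List.of and Set.of (Java 9+), which are immutable from the start. A condensed sketch of the resulting shape, reusing this processor's names:

    private static final List<PropertyDescriptor> PROPERTIES = List.of(
            ATTRIBUTES_LIST, ATTRIBUTES_REGEX, DESTINATION,
            INCLUDE_CORE_ATTRIBUTES, NULL_VALUE_FOR_EMPTY_STRING, INCLUDE_SCHEMA);

    private static final Set<Relationship> RELATIONSHIPS = Set.of(REL_SUCCESS, REL_FAILURE);

    @Override
    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        return PROPERTIES;  // already unmodifiable and shared by all instances
    }

    @Override
    public Set<Relationship> getRelationships() {
        return RELATIONSHIPS;
    }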
@@ -36,16 +36,13 @@ import org.apache.nifi.flowfile.attributes.CoreAttributes;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;

import java.io.BufferedOutputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;

@@ -157,7 +154,7 @@ public class AttributesToJSON extends AbstractProcessor {
.required(true)
.expressionLanguageSupported(ExpressionLanguageScope.NONE)
.allowableValues(JsonHandlingStrategy.class)
.defaultValue(AttributesToJSON.JsonHandlingStrategy.ESCAPED.getValue())
.defaultValue(AttributesToJSON.JsonHandlingStrategy.ESCAPED)
.build();

public static final PropertyDescriptor PRETTY_PRINT = new PropertyDescriptor.Builder()

@@ -170,14 +167,27 @@
.dependsOn(DESTINATION, DESTINATION_CONTENT)
.build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(
ATTRIBUTES_LIST,
ATTRIBUTES_REGEX,
DESTINATION,
INCLUDE_CORE_ATTRIBUTES,
NULL_VALUE_FOR_EMPTY_STRING,
JSON_HANDLING_STRATEGY,
PRETTY_PRINT
);

public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success")
.description("Successfully converted attributes to JSON").build();
public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure")
.description("Failed to convert attributes to JSON").build();

private static final Set<Relationship> RELATIONSHIPS = Set.of(
REL_SUCCESS,
REL_FAILURE
);

private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
private List<PropertyDescriptor> properties;
private Set<Relationship> relationships;
private volatile Set<String> attributesToRemove;
private volatile Set<String> attributes;
private volatile Boolean nullValueForEmptyString;

@@ -186,32 +196,14 @@ public class AttributesToJSON extends AbstractProcessor {
private volatile Pattern pattern;
private volatile JsonHandlingStrategy jsonHandlingStrategy;

@Override
protected void init(final ProcessorInitializationContext context) {
final List<PropertyDescriptor> properties = new ArrayList<>();
properties.add(ATTRIBUTES_LIST);
properties.add(ATTRIBUTES_REGEX);
properties.add(DESTINATION);
properties.add(INCLUDE_CORE_ATTRIBUTES);
properties.add(NULL_VALUE_FOR_EMPTY_STRING);
properties.add(JSON_HANDLING_STRATEGY);
properties.add(PRETTY_PRINT);
this.properties = Collections.unmodifiableList(properties);

final Set<Relationship> relationships = new HashSet<>();
relationships.add(REL_SUCCESS);
relationships.add(REL_FAILURE);
this.relationships = Collections.unmodifiableSet(relationships);
}

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return properties;
return PROPERTIES;
}

@Override
public Set<Relationship> getRelationships() {
return relationships;
return RELATIONSHIPS;
}

/**

@@ -272,7 +264,7 @@ public class AttributesToJSON extends AbstractProcessor {

@OnScheduled
public void onScheduled(ProcessContext context) {
attributesToRemove = context.getProperty(INCLUDE_CORE_ATTRIBUTES).asBoolean() ? Collections.EMPTY_SET : Arrays.stream(CoreAttributes.values())
attributesToRemove = context.getProperty(INCLUDE_CORE_ATTRIBUTES).asBoolean() ? Set.of() : Arrays.stream(CoreAttributes.values())
.map(CoreAttributes::key)
.collect(Collectors.toSet());
attributes = buildAtrs(context.getProperty(ATTRIBUTES_LIST).getValue());

@@ -280,7 +272,7 @@ public class AttributesToJSON extends AbstractProcessor {
destinationContent = DESTINATION_CONTENT.equals(context.getProperty(DESTINATION).getValue());
final boolean prettyPrint = context.getProperty(PRETTY_PRINT).asBoolean();
objectWriter = destinationContent && prettyPrint ? OBJECT_MAPPER.writerWithDefaultPrettyPrinter() : OBJECT_MAPPER.writer();
jsonHandlingStrategy = JsonHandlingStrategy.valueOf(context.getProperty(JSON_HANDLING_STRATEGY).getValue());
jsonHandlingStrategy = context.getProperty(JSON_HANDLING_STRATEGY).asAllowableValue(JsonHandlingStrategy.class);

if (context.getProperty(ATTRIBUTES_REGEX).isSet()) {
pattern = Pattern.compile(context.getProperty(ATTRIBUTES_REGEX).evaluateAttributeExpressions().getValue());
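The onScheduled change above relies on the typed property API that this commit adopts: .allowableValues(JsonHandlingStrategy.class) and .defaultValue(JsonHandlingStrategy.ESCAPED) on the builder pair with asAllowableValue(...) on the property value, so the enum round-trips without a manual valueOf. Re-stated from the hunk:

    // Before: recover the enum from its raw string form
    jsonHandlingStrategy = JsonHandlingStrategy.valueOf(
            context.getProperty(JSON_HANDLING_STRATEGY).getValue());

    // After: the framework resolves the typed value directly
    jsonHandlingStrategy = context.getProperty(JSON_HANDLING_STRATEGY)
            .asAllowableValue(JsonHandlingStrategy.class);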
@@ -45,7 +45,6 @@ import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;

@@ -87,6 +86,11 @@ public class CalculateRecordStats extends AbstractProcessor {
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
.build();

static final List<PropertyDescriptor> PROPERTIES = List.of(
RECORD_READER,
LIMIT
);

static final Relationship REL_SUCCESS = new Relationship.Builder()
.name("success")
.description("If a flowfile is successfully processed, it goes here.")

@@ -96,22 +100,13 @@ public class CalculateRecordStats extends AbstractProcessor {
.description("If a flowfile fails to be processed, it goes here.")
.build();

static final Set<Relationship> RELATIONSHIPS = Set.of(
REL_SUCCESS,
REL_FAILURE
);

private RecordPathCache cache;

static final Set RELATIONSHIPS;
static final List<PropertyDescriptor> PROPERTIES;

static {
Set _rels = new HashSet();
_rels.add(REL_SUCCESS);
_rels.add(REL_FAILURE);
RELATIONSHIPS = Collections.unmodifiableSet(_rels);
List<PropertyDescriptor> _temp = new ArrayList<>();
_temp.add(RECORD_READER);
_temp.add(LIMIT);
PROPERTIES = Collections.unmodifiableList(_temp);
}

protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
return new PropertyDescriptor.Builder()
.name(propertyDescriptorName)

@@ -160,7 +155,7 @@ public class CalculateRecordStats extends AbstractProcessor {

protected Map<String, RecordPath> getRecordPaths(ProcessContext context, FlowFile flowFile) {
return context.getProperties().keySet()
.stream().filter(p -> p.isDynamic())
.stream().filter(PropertyDescriptor::isDynamic)
.collect(Collectors.toMap(
e -> e.getName(),
e -> {

@@ -189,7 +184,7 @@ public class CalculateRecordStats extends AbstractProcessor {
String approxValue = value.get().getValue().toString();
String baseKey = String.format("recordStats.%s", entry.getKey());
String key = String.format("%s.%s", baseKey, approxValue);
Integer stat = retVal.containsKey(key) ? retVal.get(key) : 0;
Integer stat = retVal.getOrDefault(key, 0);
Integer baseStat = retVal.getOrDefault(baseKey, 0);
stat++;
baseStat++;

@@ -224,10 +219,10 @@ public class CalculateRecordStats extends AbstractProcessor {
protected Map filterBySize(Map<String, Integer> values, Integer limit, List<String> baseKeys) {
Map<String, Integer> toFilter = values.entrySet().stream()
.filter(e -> !baseKeys.contains(e.getKey()))
.collect(Collectors.toMap(e -> e.getKey(), e -> e.getValue()));
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
Map<String, Integer> retVal = values.entrySet().stream()
.filter((e -> baseKeys.contains(e.getKey())))
.collect(Collectors.toMap(e -> e.getKey(), e -> e.getValue()));
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));

List<Map.Entry<String, Integer>> _flat = new ArrayList<>(toFilter.entrySet());
_flat.sort(Map.Entry.comparingByValue());
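Two small modernizations recur in CalculateRecordStats: Map.getOrDefault collapses the containsKey/get dance into one call, and method references replace trivial lambdas such as e -> e.getKey(). A runnable sketch with made-up keys:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.stream.Collectors;

    class StatsIdioms {
        public static void main(String[] args) {
            Map<String, Integer> counts = new HashMap<>();
            counts.put("recordStats.city.Paris", 2);

            // getOrDefault: one call instead of containsKey() followed by get()
            int stat = counts.getOrDefault("recordStats.city.London", 0) + 1;

            // Method references instead of e -> e.getKey() / e -> e.getValue()
            Map<String, Integer> copy = counts.entrySet().stream()
                    .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));

            System.out.println(stat + " " + copy);
        }
    }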
@@ -187,6 +187,13 @@ public class CompressContent extends AbstractProcessor {
.defaultValue("false")
.build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(
MODE,
COMPRESSION_FORMAT,
COMPRESSION_LEVEL,
UPDATE_FILENAME
);

public static final Relationship REL_SUCCESS = new Relationship.Builder()
.name("success")
.description("FlowFiles will be transferred to the success relationship after successfully being compressed or decompressed")

@@ -196,13 +203,10 @@ public class CompressContent extends AbstractProcessor {
.description("FlowFiles will be transferred to the failure relationship if they fail to compress/decompress")
.build();

private final List<PropertyDescriptor> properties = List.of(MODE,
COMPRESSION_FORMAT,
COMPRESSION_LEVEL,
UPDATE_FILENAME);

private final Set<Relationship> relationships = Set.of(REL_SUCCESS,
REL_FAILURE);
private static final Set<Relationship> RELATIONSHIPS = Set.of(
REL_SUCCESS,
REL_FAILURE
);

private final Map<String, String> compressionFormatMimeTypeMap = Map.ofEntries(
Map.entry("application/gzip", COMPRESSION_FORMAT_GZIP),

@@ -222,12 +226,12 @@ public class CompressContent extends AbstractProcessor {

@Override
public Set<Relationship> getRelationships() {
return relationships;
return RELATIONSHIPS;
}

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return properties;
return PROPERTIES;
}

@Override

@@ -439,7 +443,7 @@ public class CompressContent extends AbstractProcessor {
if (closeable != null) {
try {
closeable.close();
} catch (final Exception e) {
} catch (final Exception ignored) {
}
}
}
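Renaming the unused catch parameter from e to ignored changes no behavior; it documents that the close failure is deliberately swallowed and satisfies IDE and linter unused-variable checks. Sketch:

    try {
        closeable.close();
    } catch (final Exception ignored) {
        // close failures during cleanup are intentionally ignored
    }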
@@ -208,6 +208,17 @@ public class ControlRate extends AbstractProcessor {
.expressionLanguageSupported(ExpressionLanguageScope.NONE)
.build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(
RATE_CONTROL_CRITERIA,
TIME_PERIOD,
MAX_RATE,
MAX_DATA_RATE,
MAX_COUNT_RATE,
RATE_EXCEEDED_STRATEGY,
RATE_CONTROL_ATTRIBUTE_NAME,
GROUPING_ATTRIBUTE_NAME
);

static final Relationship REL_SUCCESS = new Relationship.Builder()
.name("success")
.description("FlowFiles are transferred to this relationship under normal conditions")

@@ -222,23 +233,20 @@ public class ControlRate extends AbstractProcessor {
"Strategy is configured to use this Relationship.")
.build();

private static final Set<Relationship> DEFAULT_RELATIONSHIPS = Set.of(
REL_SUCCESS,
REL_FAILURE
);
private static final Set<Relationship> RATE_EXCEEDED_RELATIONSHIPS = Set.of(
REL_SUCCESS,
REL_FAILURE,
REL_RATE_EXCEEDED
);

private static final Pattern POSITIVE_LONG_PATTERN = Pattern.compile("0*[1-9][0-9]*");
private static final String DEFAULT_GROUP_ATTRIBUTE = ControlRate.class.getName() + "###____DEFAULT_GROUP_ATTRIBUTE___###";

private static final List<PropertyDescriptor> properties = List.of(
RATE_CONTROL_CRITERIA,
TIME_PERIOD,
MAX_RATE,
MAX_DATA_RATE,
MAX_COUNT_RATE,
RATE_EXCEEDED_STRATEGY,
RATE_CONTROL_ATTRIBUTE_NAME,
GROUPING_ATTRIBUTE_NAME
);

private static final Set<Relationship> defaultRelationships = Set.of(REL_SUCCESS, REL_FAILURE);
private static final Set<Relationship> rateExceededRelationships = Set.of(REL_SUCCESS, REL_FAILURE, REL_RATE_EXCEEDED);
private volatile Set<Relationship> relationships = defaultRelationships;
private volatile Set<Relationship> relationships = DEFAULT_RELATIONSHIPS;

private final ConcurrentMap<String, Throttle> dataThrottleMap = new ConcurrentHashMap<>();
private final ConcurrentMap<String, Throttle> countThrottleMap = new ConcurrentHashMap<>();

@@ -253,7 +261,7 @@ public class ControlRate extends AbstractProcessor {

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return properties;
return PROPERTIES;
}

@Override

@@ -300,9 +308,9 @@ public class ControlRate extends AbstractProcessor {

if (descriptor.equals(RATE_EXCEEDED_STRATEGY)) {
if (ROUTE_TO_RATE_EXCEEDED.getValue().equalsIgnoreCase(newValue)) {
this.relationships = RATE_EXCEEDED_RELATIONSHIPS;
} else {
this.relationships = DEFAULT_RELATIONSHIPS;
}
}
|
|||
*/
|
||||
package org.apache.nifi.processors.standard;
|
||||
|
||||
import org.apache.nifi.annotation.behavior.InputRequirement;
|
||||
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
|
||||
import org.apache.nifi.annotation.behavior.SideEffectFree;
|
||||
import org.apache.nifi.annotation.behavior.SupportsBatching;
|
||||
import org.apache.nifi.annotation.documentation.CapabilityDescription;
|
||||
import org.apache.nifi.annotation.documentation.Tags;
|
||||
import org.apache.nifi.components.PropertyDescriptor;
|
||||
import org.apache.nifi.expression.ExpressionLanguageScope;
|
||||
import org.apache.nifi.flowfile.FlowFile;
|
||||
import org.apache.nifi.logging.ComponentLog;
|
||||
import org.apache.nifi.processor.AbstractProcessor;
|
||||
import org.apache.nifi.processor.ProcessContext;
|
||||
import org.apache.nifi.processor.ProcessSession;
|
||||
import org.apache.nifi.processor.Relationship;
|
||||
import org.apache.nifi.processor.exception.ProcessException;
|
||||
import org.apache.nifi.processor.io.StreamCallback;
|
||||
import org.apache.nifi.processor.util.StandardValidators;
|
||||
import org.apache.nifi.util.StopWatch;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.BufferedWriter;
|
||||
import java.io.IOException;
|
||||
|
@ -23,35 +42,11 @@ import java.io.InputStream;
|
|||
import java.io.InputStreamReader;
|
||||
import java.io.OutputStream;
|
||||
import java.io.OutputStreamWriter;
|
||||
|
||||
import org.apache.nifi.processor.ProcessContext;
|
||||
import org.apache.nifi.processor.AbstractProcessor;
|
||||
import org.apache.nifi.processor.ProcessorInitializationContext;
|
||||
import org.apache.nifi.processor.ProcessSession;
|
||||
import org.apache.nifi.processor.Relationship;
|
||||
import org.apache.nifi.components.PropertyDescriptor;
|
||||
import org.apache.nifi.expression.ExpressionLanguageScope;
|
||||
import org.apache.nifi.flowfile.FlowFile;
|
||||
import org.apache.nifi.logging.ComponentLog;
|
||||
import org.apache.nifi.annotation.documentation.CapabilityDescription;
|
||||
import org.apache.nifi.annotation.behavior.InputRequirement;
|
||||
import org.apache.nifi.annotation.behavior.SideEffectFree;
|
||||
import org.apache.nifi.annotation.behavior.SupportsBatching;
|
||||
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
|
||||
import org.apache.nifi.annotation.documentation.Tags;
|
||||
import org.apache.nifi.processor.exception.ProcessException;
|
||||
import org.apache.nifi.processor.io.StreamCallback;
|
||||
import org.apache.nifi.processor.util.StandardValidators;
|
||||
import org.apache.nifi.util.StopWatch;
|
||||
|
||||
import java.nio.CharBuffer;
|
||||
import java.nio.charset.Charset;
|
||||
import java.nio.charset.CharsetDecoder;
|
||||
import java.nio.charset.CharsetEncoder;
|
||||
import java.nio.charset.CodingErrorAction;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
@ -99,33 +94,25 @@ public class ConvertCharacterSet extends AbstractProcessor {
|
|||
.required(true)
|
||||
.build();
|
||||
|
||||
private final List<PropertyDescriptor> PROPERTIES = List.of(
|
||||
INPUT_CHARSET,
|
||||
OUTPUT_CHARSET
|
||||
);
|
||||
|
||||
public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success").description("").build();
|
||||
|
||||
private final Set<Relationship> RELATIONSHIPS = Set.of(REL_SUCCESS);
|
||||
|
||||
public static final int MAX_BUFFER_SIZE = 512 * 1024;
|
||||
|
||||
private Set<Relationship> relationships;
|
||||
private List<PropertyDescriptor> properties;
|
||||
|
||||
@Override
|
||||
protected void init(final ProcessorInitializationContext context) {
|
||||
final Set<Relationship> relationships = new HashSet<>();
|
||||
relationships.add(REL_SUCCESS);
|
||||
this.relationships = Collections.unmodifiableSet(relationships);
|
||||
|
||||
final List<PropertyDescriptor> properties = new ArrayList<>();
|
||||
properties.add(INPUT_CHARSET);
|
||||
properties.add(OUTPUT_CHARSET);
|
||||
this.properties = Collections.unmodifiableList(properties);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Set<Relationship> getRelationships() {
|
||||
return relationships;
|
||||
return RELATIONSHIPS;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
|
||||
return properties;
|
||||
return PROPERTIES;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@@ -32,6 +32,7 @@ import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.serialization.record.Record;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

@SupportsBatching

@@ -60,12 +61,11 @@ public class ConvertRecord extends AbstractRecordProcessor {
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
final List<PropertyDescriptor> properties = new ArrayList<>(super.getSupportedPropertyDescriptors());
properties.add(INCLUDE_ZERO_RECORD_FLOWFILES);
return properties;
return Collections.unmodifiableList(properties);
}

@Override
protected Record process(final Record record, final FlowFile flowFile, final ProcessContext context, final long count) {
return record;
}

}
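ConvertRecord cannot use List.of here because the list is derived at runtime from the parent's descriptors plus one extra entry, so the fix wraps the builder list instead; Collections.unmodifiableList keeps callers from mutating the returned view. Re-stated from the hunk with comments:

    // List.of does not fit: the contents depend on super's descriptors at runtime
    final List<PropertyDescriptor> properties =
            new ArrayList<>(super.getSupportedPropertyDescriptors());
    properties.add(INCLUDE_ZERO_RECORD_FLOWFILES);
    return Collections.unmodifiableList(properties);  // read-only view for callers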
@@ -16,26 +16,6 @@
*/
package org.apache.nifi.processors.standard;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
import org.apache.nifi.annotation.behavior.SideEffectFree;

@@ -56,6 +36,23 @@ import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.util.StringUtils;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.Stream;

@SideEffectFree
@SupportsBatching
@Tags({"count", "text", "line", "word", "character"})

@@ -70,13 +67,14 @@ import org.apache.nifi.util.StringUtils;
})
@SeeAlso(SplitText.class)
public class CountText extends AbstractProcessor {
private static final List<Charset> STANDARD_CHARSETS = Arrays.asList(
private static final List<Charset> STANDARD_CHARSETS = List.of(
StandardCharsets.UTF_8,
StandardCharsets.US_ASCII,
StandardCharsets.ISO_8859_1,
StandardCharsets.UTF_16,
StandardCharsets.UTF_16LE,
StandardCharsets.UTF_16BE);
StandardCharsets.UTF_16BE
);

private static final Pattern SYMBOL_PATTERN = Pattern.compile("[\\s-\\._]");
private static final Pattern WHITESPACE_ONLY_PATTERN = Pattern.compile("\\s");

@@ -152,9 +150,16 @@ public class CountText extends AbstractProcessor {
.defaultValue("false")
.addValidator(StandardValidators.BOOLEAN_VALIDATOR)
.build();
private static Set<String> getStandardCharsetNames() {
return STANDARD_CHARSETS.stream().map(c -> c.displayName()).collect(Collectors.toSet());
}

private static final List<PropertyDescriptor> PROPERTIES = List.of(
TEXT_LINE_COUNT_PD,
TEXT_LINE_NONEMPTY_COUNT_PD,
TEXT_WORD_COUNT_PD,
TEXT_CHARACTER_COUNT_PD,
SPLIT_WORDS_ON_SYMBOLS_PD,
CHARACTER_ENCODING_PD,
ADJUST_IMMEDIATELY
);

public static final Relationship REL_SUCCESS = new Relationship.Builder()
.name("success")

@@ -165,20 +170,13 @@ public class CountText extends AbstractProcessor {
.description("If the flowfile text cannot be counted for some reason, the original file will be routed to this destination and nothing will be routed elsewhere")
.build();

private static final List<PropertyDescriptor> properties;
private static final Set<Relationship> relationships;
private static final Set<Relationship> RELATIONSHIPS = Set.of(
REL_SUCCESS,
REL_FAILURE
);

static {
properties = Collections.unmodifiableList(Arrays.asList(TEXT_LINE_COUNT_PD,
TEXT_LINE_NONEMPTY_COUNT_PD,
TEXT_WORD_COUNT_PD,
TEXT_CHARACTER_COUNT_PD,
SPLIT_WORDS_ON_SYMBOLS_PD,
CHARACTER_ENCODING_PD,
ADJUST_IMMEDIATELY));

relationships = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(REL_SUCCESS,
REL_FAILURE)));
private static Set<String> getStandardCharsetNames() {
return STANDARD_CHARSETS.stream().map(Charset::displayName).collect(Collectors.toSet());
}

private volatile boolean countLines;

@@ -191,7 +189,7 @@ public class CountText extends AbstractProcessor {

@Override
public Set<Relationship> getRelationships() {
return relationships;
return RELATIONSHIPS;
}

@OnScheduled

@@ -240,7 +238,7 @@ public class CountText extends AbstractProcessor {
}

if (countLinesNonEmpty) {
if (line.trim().length() > 0) {
if (!line.isBlank()) {
lineNonEmptyCount.incrementAndGet();
}
}

@@ -316,7 +314,7 @@ public class CountText extends AbstractProcessor {
}

int countWordsInLine(String line, boolean splitWordsOnSymbols) throws IOException {
if (line == null || line.trim().length() == 0) {
if (line == null || line.isBlank()) {
return 0;
} else {
Pattern regex = splitWordsOnSymbols ? SYMBOL_PATTERN : WHITESPACE_ONLY_PATTERN;

@@ -333,6 +331,6 @@ public class CountText extends AbstractProcessor {

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return properties;
return PROPERTIES;
}
}
@@ -17,12 +17,6 @@

package org.apache.nifi.processors.standard;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.SideEffectFree;
import org.apache.nifi.annotation.behavior.SupportsBatching;

@@ -35,13 +29,16 @@ import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.processors.standard.hash.HashAlgorithm;
import org.apache.nifi.processors.standard.hash.HashService;

import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;

@SideEffectFree
@SupportsBatching
@Tags({"content", "hash", "sha", "blake2", "md5", "cryptography"})

@@ -73,6 +70,11 @@ public class CryptographicHashContent extends AbstractProcessor {
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(
FAIL_WHEN_EMPTY,
HASH_ALGORITHM
);

public static final Relationship REL_SUCCESS = new Relationship.Builder()
.name("success")
.description("Used for flowfiles that have a hash value added")

@@ -83,31 +85,19 @@ public class CryptographicHashContent extends AbstractProcessor {
.description("Used for flowfiles that have no content if the 'fail on empty' setting is enabled")
.build();

private static Set<Relationship> relationships;

private static List<PropertyDescriptor> properties;

@Override
protected void init(final ProcessorInitializationContext context) {
final Set<Relationship> _relationships = new HashSet<>();
_relationships.add(REL_FAILURE);
_relationships.add(REL_SUCCESS);
relationships = Collections.unmodifiableSet(_relationships);

final List<PropertyDescriptor> _properties = new ArrayList<>();
_properties.add(FAIL_WHEN_EMPTY);
_properties.add(HASH_ALGORITHM);
properties = Collections.unmodifiableList(_properties);
}
private static final Set<Relationship> RELATIONSHIPS = Set.of(
REL_FAILURE,
REL_SUCCESS
);

@Override
public Set<Relationship> getRelationships() {
return relationships;
return RELATIONSHIPS;
}

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return properties;
return PROPERTIES;
}

@Override
@@ -16,19 +16,6 @@
*/
package org.apache.nifi.processors.standard;

import java.io.IOException;
import java.io.OutputStream;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;

import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnScheduled;

@@ -51,6 +38,16 @@ import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.io.OutputStreamCallback;
import org.apache.nifi.processor.util.StandardValidators;

import java.io.IOException;
import java.io.OutputStream;
import java.lang.reflect.InvocationTargetException;
import java.util.Collection;
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;

@Tags({"test", "debug", "processor", "utility", "flow", "FlowFile"})
@CapabilityDescription("The DebugFlow processor aids testing and debugging the FlowFile framework by allowing various "
+ "responses to be explicitly triggered in response to the receipt of a FlowFile or a timer event without a "

@@ -69,7 +66,7 @@ public class DebugFlow extends AbstractProcessor {
.description("FlowFiles that failed to process.")
.build();

private final AtomicReference<List<PropertyDescriptor>> propertyDescriptors = new AtomicReference<>();
private final AtomicReference<List<PropertyDescriptor>> properties = new AtomicReference<>();

static final PropertyDescriptor FF_SUCCESS_ITERATIONS = new PropertyDescriptor.Builder()
.name("FlowFile Success Iterations")

@@ -264,10 +261,7 @@ public class DebugFlow extends AbstractProcessor {
public Set<Relationship> getRelationships() {
synchronized (relationships) {
if (relationships.get() == null) {
HashSet<Relationship> relSet = new HashSet<>();
relSet.add(REL_SUCCESS);
relSet.add(REL_FAILURE);
relationships.compareAndSet(null, Collections.unmodifiableSet(relSet));
relationships.compareAndSet(null, Set.of(REL_SUCCESS, REL_FAILURE));
}
return relationships.get();
}

@@ -275,35 +269,36 @@ public class DebugFlow extends AbstractProcessor {

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
synchronized (propertyDescriptors) {
if (propertyDescriptors.get() == null) {
ArrayList<PropertyDescriptor> propList = new ArrayList<>();
propList.add(FF_SUCCESS_ITERATIONS);
propList.add(FF_FAILURE_ITERATIONS);
propList.add(FF_ROLLBACK_ITERATIONS);
propList.add(FF_ROLLBACK_YIELD_ITERATIONS);
propList.add(FF_ROLLBACK_PENALTY_ITERATIONS);
propList.add(FF_EXCEPTION_ITERATIONS);
propList.add(FF_EXCEPTION_CLASS);
propList.add(NO_FF_SKIP_ITERATIONS);
propList.add(NO_FF_EXCEPTION_ITERATIONS);
propList.add(NO_FF_YIELD_ITERATIONS);
propList.add(NO_FF_EXCEPTION_CLASS);
propList.add(WRITE_ITERATIONS);
propList.add(CONTENT_SIZE);
propList.add(ON_SCHEDULED_SLEEP_TIME);
propList.add(ON_SCHEDULED_FAIL);
propList.add(ON_UNSCHEDULED_SLEEP_TIME);
propList.add(ON_UNSCHEDULED_FAIL);
propList.add(ON_STOPPED_SLEEP_TIME);
propList.add(ON_STOPPED_FAIL);
propList.add(ON_TRIGGER_SLEEP_TIME);
propList.add(CUSTOM_VALIDATE_SLEEP_TIME);
propList.add(IGNORE_INTERRUPTS);
synchronized (properties) {
if (properties.get() == null) {
List<PropertyDescriptor> properties = List.of(
FF_SUCCESS_ITERATIONS,
FF_FAILURE_ITERATIONS,
FF_ROLLBACK_ITERATIONS,
FF_ROLLBACK_YIELD_ITERATIONS,
FF_ROLLBACK_PENALTY_ITERATIONS,
FF_EXCEPTION_ITERATIONS,
FF_EXCEPTION_CLASS,
NO_FF_SKIP_ITERATIONS,
NO_FF_EXCEPTION_ITERATIONS,
NO_FF_YIELD_ITERATIONS,
NO_FF_EXCEPTION_CLASS,
WRITE_ITERATIONS,
CONTENT_SIZE,
ON_SCHEDULED_SLEEP_TIME,
ON_SCHEDULED_FAIL,
ON_UNSCHEDULED_SLEEP_TIME,
ON_UNSCHEDULED_FAIL,
ON_STOPPED_SLEEP_TIME,
ON_STOPPED_FAIL,
ON_TRIGGER_SLEEP_TIME,
CUSTOM_VALIDATE_SLEEP_TIME,
IGNORE_INTERRUPTS
);

propertyDescriptors.compareAndSet(null, Collections.unmodifiableList(propList));
this.properties.compareAndSet(null, properties);
}
return propertyDescriptors.get();
return properties.get();
}
}

@@ -352,7 +347,7 @@ public class DebugFlow extends AbstractProcessor {
} catch (InterruptedException e) {
Thread.currentThread().interrupt();

return Collections.singleton(new ValidationResult.Builder()
return Set.of(new ValidationResult.Builder()
.valid(false)
.subject("Validation")
.explanation("Processor Interrupted while performing validation").build());
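DebugFlow keeps its lazy AtomicReference initialization but builds the values with the factory methods: Set.of(a, b) replaces the HashSet-plus-unmodifiableSet dance, and Set.of(x) stands in for Collections.singleton(x). A sketch of the equivalences, reusing the relationship names above:

    // One element: was Collections.singleton(REL_SUCCESS)
    Set<Relationship> one = Set.of(REL_SUCCESS);

    // Several elements: was Collections.unmodifiableSet(new HashSet<>(...))
    Set<Relationship> two = Set.of(REL_SUCCESS, REL_FAILURE);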
@@ -47,7 +47,6 @@ import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;

@@ -71,7 +70,6 @@ import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;

@@ -266,8 +264,19 @@ public class DeduplicateRecord extends AbstractProcessor {
.required(false)
.build();

// RELATIONSHIPS
private static final List<PropertyDescriptor> PROPERTIES = List.of(
DEDUPLICATION_STRATEGY,
DISTRIBUTED_MAP_CACHE,
CACHE_IDENTIFIER,
PUT_CACHE_IDENTIFIER,
RECORD_READER,
RECORD_WRITER,
INCLUDE_ZERO_RECORD_FLOWFILES,
RECORD_HASHING_ALGORITHM,
FILTER_TYPE,
FILTER_CAPACITY_HINT,
BLOOM_FILTER_FPP
);

static final Relationship REL_DUPLICATE = new Relationship.Builder()
.name("duplicate")

@@ -289,42 +298,21 @@ public class DeduplicateRecord extends AbstractProcessor {
.description("If unable to communicate with the cache, the FlowFile will be penalized and routed to this relationship")
.build();

private List<PropertyDescriptor> descriptors;

private Set<Relationship> relationships;

@Override
protected void init(final ProcessorInitializationContext context) {
final List<PropertyDescriptor> descriptors = new ArrayList<>();
descriptors.add(DEDUPLICATION_STRATEGY);
descriptors.add(DISTRIBUTED_MAP_CACHE);
descriptors.add(CACHE_IDENTIFIER);
descriptors.add(PUT_CACHE_IDENTIFIER);
descriptors.add(RECORD_READER);
descriptors.add(RECORD_WRITER);
descriptors.add(INCLUDE_ZERO_RECORD_FLOWFILES);
descriptors.add(RECORD_HASHING_ALGORITHM);
descriptors.add(FILTER_TYPE);
descriptors.add(FILTER_CAPACITY_HINT);
descriptors.add(BLOOM_FILTER_FPP);
this.descriptors = Collections.unmodifiableList(descriptors);

final Set<Relationship> relationships = new HashSet<>();
relationships.add(REL_DUPLICATE);
relationships.add(REL_NON_DUPLICATE);
relationships.add(REL_ORIGINAL);
relationships.add(REL_FAILURE);
this.relationships = Collections.unmodifiableSet(relationships);
}
private static final Set<Relationship> RELATIONSHIPS = Set.of(
REL_DUPLICATE,
REL_NON_DUPLICATE,
REL_ORIGINAL,
REL_FAILURE
);

@Override
public Set<Relationship> getRelationships() {
return this.relationships;
return RELATIONSHIPS;
}

@Override
public final List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return descriptors;
return PROPERTIES;
}

@Override

@@ -552,7 +540,7 @@ public class DeduplicateRecord extends AbstractProcessor {
final String value = context.getProperty(propertyDescriptor).evaluateAttributeExpressions(flowFile).getValue();
final RecordPath recordPath = recordPathCache.getCompiled(value);
final RecordPathResult result = recordPath.evaluate(record);
final List<FieldValue> selectedFields = result.getSelectedFields().collect(Collectors.toList());
final List<FieldValue> selectedFields = result.getSelectedFields().toList();

// Add the name of the dynamic property
fieldValues.add(propertyDescriptor.getName());

@@ -561,7 +549,7 @@ public class DeduplicateRecord extends AbstractProcessor {
fieldValues.addAll(selectedFields.stream()
.filter(f -> f.getValue() != null)
.map(f -> f.getValue().toString())
.collect(Collectors.toList())
.toList()
);
}
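Stream.toList() (Java 16+) is the drop-in for collect(Collectors.toList()) used twice above, with one caveat: it returns an unmodifiable list, whereas Collectors.toList() has traditionally returned a mutable ArrayList. A runnable sketch:

    import java.util.List;
    import java.util.stream.Stream;

    class ToListDemo {
        public static void main(String[] args) {
            List<String> values = Stream.of("a", null, "b")
                    .filter(v -> v != null)   // mirrors the null filter in the hunk
                    .map(String::toUpperCase)
                    .toList();                // unmodifiable result
            System.out.println(values);       // prints [A, B]
        }
    }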
@@ -85,7 +85,11 @@ public class DeleteFile extends AbstractProcessor {
.description("All FlowFiles, for which an existing file could not be deleted, are routed to this relationship")
.build();

private final static Set<Relationship> relationships = Set.of(REL_SUCCESS, REL_NOT_FOUND, REL_FAILURE);
private final static Set<Relationship> RELATIONSHIPS = Set.of(
REL_SUCCESS,
REL_NOT_FOUND,
REL_FAILURE
);

public static final PropertyDescriptor DIRECTORY_PATH = new PropertyDescriptor.Builder()
.name("Directory Path")

@@ -105,16 +109,19 @@ public class DeleteFile extends AbstractProcessor {
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
.build();

private final static List<PropertyDescriptor> properties = List.of(DIRECTORY_PATH, FILENAME);
private final static List<PropertyDescriptor> PROPERTIES = List.of(
DIRECTORY_PATH,
FILENAME
);

@Override
public Set<Relationship> getRelationships() {
return relationships;
return RELATIONSHIPS;
}

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return properties;
return PROPERTIES;
}

@Override
@@ -16,33 +16,23 @@
*/
package org.apache.nifi.processors.standard;

import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;

import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.annotation.behavior.DefaultRunDuration;
import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
import org.apache.nifi.annotation.behavior.SupportsBatching;
import org.apache.nifi.annotation.behavior.WritesAttribute;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.SeeAlso;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.behavior.WritesAttribute;
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.distributed.cache.client.Deserializer;
import org.apache.nifi.distributed.cache.client.DistributedMapCacheClient;
import org.apache.nifi.distributed.cache.client.Serializer;
import org.apache.nifi.distributed.cache.client.exception.DeserializationException;
import org.apache.nifi.distributed.cache.client.exception.SerializationException;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.expression.AttributeExpression.ResultType;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.processor.AbstractProcessor;

@@ -52,6 +42,13 @@ import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;

import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;

@SupportsBatching(defaultDuration = DefaultRunDuration.TWENTY_FIVE_MILLIS)
@Tags({"hash", "dupe", "duplicate", "dedupe"})
@InputRequirement(Requirement.INPUT_REQUIRED)

@@ -111,6 +108,14 @@ public class DetectDuplicate extends AbstractProcessor {
.defaultValue("true")
.build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(
CACHE_ENTRY_IDENTIFIER,
FLOWFILE_DESCRIPTION,
AGE_OFF_DURATION,
DISTRIBUTED_CACHE_SERVICE,
CACHE_IDENTIFIER
);

public static final Relationship REL_DUPLICATE = new Relationship.Builder()
.name("duplicate")
.description("If a FlowFile has been detected to be a duplicate, it will be routed to this relationship")

@@ -123,34 +128,25 @@ public class DetectDuplicate extends AbstractProcessor {
.name("failure")
.description("If unable to communicate with the cache, the FlowFile will be penalized and routed to this relationship")
.build();
private final Set<Relationship> relationships;

private static final Set<Relationship> RELATIONSHIPS = Set.of(
REL_DUPLICATE,
REL_NON_DUPLICATE,
REL_FAILURE
);

private final Serializer<String> keySerializer = new StringSerializer();
private final Serializer<CacheValue> valueSerializer = new CacheValueSerializer();
private final Deserializer<CacheValue> valueDeserializer = new CacheValueDeserializer();

public DetectDuplicate() {
final Set<Relationship> rels = new HashSet<>();
rels.add(REL_DUPLICATE);
rels.add(REL_NON_DUPLICATE);
rels.add(REL_FAILURE);
relationships = Collections.unmodifiableSet(rels);
}

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
final List<PropertyDescriptor> descriptors = new ArrayList<>();
descriptors.add(CACHE_ENTRY_IDENTIFIER);
descriptors.add(FLOWFILE_DESCRIPTION);
descriptors.add(AGE_OFF_DURATION);
descriptors.add(DISTRIBUTED_CACHE_SERVICE);
descriptors.add(CACHE_IDENTIFIER);
return descriptors;
return PROPERTIES;
}

@Override
public Set<Relationship> getRelationships() {
return relationships;
return RELATIONSHIPS;
}

@Override

@@ -272,8 +268,7 @@ public class DetectDuplicate extends AbstractProcessor {
+ ((input[6] & 255) << 8)
+ ((input[7] & 255));
String description = new String(input, 8, input.length - 8, StandardCharsets.UTF_8);
CacheValue value = new CacheValue(description, time);
return value;
return new CacheValue(description, time);
}
}

@@ -16,17 +16,6 @@
*/
package org.apache.nifi.processors.standard;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;

import org.apache.nifi.annotation.behavior.DefaultRunDuration;
import org.apache.nifi.annotation.behavior.DynamicProperty;
import org.apache.nifi.annotation.behavior.DynamicRelationship;

@@ -53,6 +42,16 @@ import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.util.StandardValidators;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;

@SideEffectFree
@SupportsBatching(defaultDuration = DefaultRunDuration.TWENTY_FIVE_MILLIS)
@InputRequirement(Requirement.INPUT_REQUIRED)

@@ -83,8 +82,6 @@ public class DistributeLoad extends AbstractProcessor {
public static final AllowableValue STRATEGY_OVERFLOW = new AllowableValue(OVERFLOW, OVERFLOW,
"Relationship selection is the first available relationship without further distribution among all relationships; at least one relationship must be available.");


public static final PropertyDescriptor NUM_RELATIONSHIPS = new PropertyDescriptor.Builder()
.name("Number of Relationships")
.description("Determines the number of Relationships to which the load should be distributed")

@@ -99,9 +96,14 @@ public class DistributeLoad extends AbstractProcessor {
.allowableValues(STRATEGY_ROUND_ROBIN, STRATEGY_NEXT_AVAILABLE, STRATEGY_OVERFLOW)
.defaultValue(ROUND_ROBIN)
.build();

private List<PropertyDescriptor> properties = List.of(
NUM_RELATIONSHIPS,
DISTRIBUTION_STRATEGY
);

public static final String RELATIONSHIP_ATTRIBUTE = "distribute.load.relationship";

private List<PropertyDescriptor> properties;
private final AtomicReference<Set<Relationship>> relationshipsRef = new AtomicReference<>();
private final AtomicReference<DistributionStrategy> strategyRef = new AtomicReference<>(new RoundRobinStrategy());
private final AtomicReference<List<Relationship>> weightedRelationshipListRef = new AtomicReference<>();

@@ -109,14 +111,7 @@ public class DistributeLoad extends AbstractProcessor {

@Override
protected void init(final ProcessorInitializationContext context) {
final Set<Relationship> relationships = new HashSet<>();
relationships.add(createRelationship(1));
relationshipsRef.set(Collections.unmodifiableSet(relationships));

final List<PropertyDescriptor> properties = new ArrayList<>();
properties.add(NUM_RELATIONSHIPS);
properties.add(DISTRIBUTION_STRATEGY);
this.properties = Collections.unmodifiableList(properties);
relationshipsRef.set(Set.of(createRelationship(1)));
}

private static Relationship createRelationship(final int num) {

@@ -136,7 +131,7 @@ public class DistributeLoad extends AbstractProcessor {
for (int i = 1; i <= Integer.parseInt(newValue); i++) {
relationships.add(createRelationship(i));
}
this.relationshipsRef.set(Collections.unmodifiableSet(relationships));
this.relationshipsRef.set(Set.copyOf(relationships));
} else if (descriptor.equals(DISTRIBUTION_STRATEGY)) {
switch (newValue.toLowerCase()) {
case ROUND_ROBIN:

@@ -158,10 +153,7 @@ public class DistributeLoad extends AbstractProcessor {
@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
if (doSetProps.getAndSet(false)) {
final List<PropertyDescriptor> props = new ArrayList<>();
props.add(NUM_RELATIONSHIPS);
props.add(DISTRIBUTION_STRATEGY);
this.properties = Collections.unmodifiableList(props);
this.properties = List.of(NUM_RELATIONSHIPS, DISTRIBUTION_STRATEGY);
}
return properties;
}

@@ -215,7 +207,7 @@ public class DistributeLoad extends AbstractProcessor {
}
}

this.weightedRelationshipListRef.set(Collections.unmodifiableList(relationshipList));
this.weightedRelationshipListRef.set(List.copyOf(relationshipList));
}

@Override

@@ -291,8 +283,7 @@ public class DistributeLoad extends AbstractProcessor {
final List<Relationship> relationshipList = DistributeLoad.this.weightedRelationshipListRef.get();
final long counterValue = counter.getAndIncrement();
final int idx = (int) (counterValue % relationshipList.size());
final Relationship relationship = relationshipList.get(idx);
return relationship;
return relationshipList.get(idx);
}

@Override
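
DistributeLoad builds its relationship set dynamically, so it cannot use a Set.of constant; the commit instead swaps Collections.unmodifiableSet and Collections.unmodifiableList for Set.copyOf and List.copyOf, which take an immutable snapshot rather than wrapping the live collection. A runnable sketch of the difference (names are illustrative):

import java.util.HashSet;
import java.util.Set;

class CopyOfSketch {
    public static void main(String[] args) {
        Set<String> building = new HashSet<>();
        building.add("relationship-1");

        // Set.copyOf snapshots the source; later mutations are not visible.
        Set<String> snapshot = Set.copyOf(building);
        building.add("relationship-2");

        System.out.println(snapshot); // [relationship-1]
        // snapshot.add("x") would throw UnsupportedOperationException
    }
}

Collections.unmodifiableSet, by contrast, is a view: changes to the backing set show through it, which is why the snapshot form is the safer replacement once the collection is fully built.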

@@ -16,13 +16,9 @@
*/
package org.apache.nifi.processors.standard;

import java.util.Collections;
import java.util.List;
import java.util.Set;

import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.SupportsBatching;
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
import org.apache.nifi.annotation.behavior.SupportsBatching;
import org.apache.nifi.annotation.behavior.WritesAttribute;
import org.apache.nifi.annotation.behavior.WritesAttributes;
import org.apache.nifi.annotation.documentation.CapabilityDescription;

@@ -37,6 +33,9 @@ import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;

import java.util.List;
import java.util.Set;

@SupportsBatching
@Tags({"test", "load", "duplicate"})
@InputRequirement(Requirement.INPUT_REQUIRED)

@@ -60,19 +59,23 @@ public class DuplicateFlowFile extends AbstractProcessor {
.addValidator(StandardValidators.NON_NEGATIVE_INTEGER_VALIDATOR)
.build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(NUM_COPIES);

static final Relationship REL_SUCCESS = new Relationship.Builder()
.name("success")
.description("The original FlowFile and all copies will be sent to this relationship")
.build();

private static final Set<Relationship> RELATIONSHIPS = Set.of(REL_SUCCESS);

@Override
public Set<Relationship> getRelationships() {
return Collections.singleton(REL_SUCCESS);
return RELATIONSHIPS;
}

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return Collections.singletonList(NUM_COPIES);
return PROPERTIES;
}

@Override

@@ -16,14 +16,6 @@
*/
package org.apache.nifi.processors.standard;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;

import org.apache.commons.codec.DecoderException;
import org.apache.commons.codec.binary.Base32InputStream;
import org.apache.commons.codec.binary.Base32OutputStream;

@@ -53,6 +45,14 @@ import org.apache.nifi.processors.standard.util.ValidatingBase64InputStream;
import org.apache.nifi.stream.io.StreamUtils;
import org.apache.nifi.util.StopWatch;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;

@SideEffectFree
@SupportsBatching
@InputRequirement(Requirement.INPUT_REQUIRED)

@@ -101,6 +101,13 @@ public class EncodeContent extends AbstractProcessor {
.dependsOn(LINE_OUTPUT_MODE, LineOutputMode.MULTIPLE_LINES)
.build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(
MODE,
ENCODING,
LINE_OUTPUT_MODE,
ENCODED_LINE_LENGTH
);

public static final Relationship REL_SUCCESS = new Relationship.Builder()
.name("success")
.description("Any FlowFile that is successfully encoded or decoded will be routed to success")

@@ -111,27 +118,23 @@ public class EncodeContent extends AbstractProcessor {
.description("Any FlowFile that cannot be encoded or decoded will be routed to failure")
.build();

private static final Set<Relationship> RELATIONSHIPS = Set.of(
REL_SUCCESS,
REL_FAILURE
);

private static final int BUFFER_SIZE = 8192;

private static final String LINE_FEED_SEPARATOR = "\n";

private static final List<PropertyDescriptor> properties = List.of(
MODE,
ENCODING,
LINE_OUTPUT_MODE,
ENCODED_LINE_LENGTH
);

private static final Set<Relationship> relationships = Set.of(REL_SUCCESS, REL_FAILURE);

@Override
public Set<Relationship> getRelationships() {
return relationships;
return RELATIONSHIPS;
}

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return properties;
return PROPERTIES;
}

@Override

@@ -141,9 +144,9 @@ public class EncodeContent extends AbstractProcessor {
return;
}

final boolean encode = context.getProperty(MODE).getValue().equals(EncodingMode.ENCODE.getValue());
final EncodingType encoding = getEncodingType(context.getProperty(ENCODING).getValue());
final boolean singleLineOutput = context.getProperty(LINE_OUTPUT_MODE).getValue().equals(LineOutputMode.SINGLE_LINE.getValue());
final boolean encode = context.getProperty(MODE).asAllowableValue(EncodingMode.class).equals(EncodingMode.ENCODE);
final EncodingType encoding = context.getProperty(ENCODING).asAllowableValue(EncodingType.class);
final boolean singleLineOutput = context.getProperty(LINE_OUTPUT_MODE).asAllowableValue(LineOutputMode.class).equals(LineOutputMode.SINGLE_LINE);
final int lineLength = singleLineOutput ? -1 : context.getProperty(ENCODED_LINE_LENGTH).evaluateAttributeExpressions(flowFile).asInteger();

final StreamCallback callback = getStreamCallback(encode, encoding, lineLength);

@@ -279,14 +282,4 @@ public class EncodeContent extends AbstractProcessor {
out.flush();
}
}

private static EncodingType getEncodingType(final String encodingTypeValue) {
if (EncodingType.BASE64.getValue().equals(encodingTypeValue)) {
return EncodingType.BASE64;
} else if (EncodingType.BASE32.getValue().equals(encodingTypeValue)) {
return EncodingType.BASE32;
} else {
return EncodingType.HEXADECIMAL;
}
}
}
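
Beyond the collection constants, the EncodeContent hunks replace string comparisons of property values (getValue().equals(...)) with PropertyValue#asAllowableValue(Class), which resolves the configured value directly to an enum constant and makes the hand-rolled getEncodingType helper unnecessary. A plain-Java sketch of why the typed lookup is preferable, using a hypothetical EncodingMode enum in place of the NiFi API:

enum EncodingMode { ENCODE, DECODE }

class TypedPropertySketch {
    public static void main(String[] args) {
        String configured = "ENCODE"; // value as stored in the configuration

        // String comparison: a typo in the literal compiles and silently fails.
        boolean encodeByString = "ENCODE".equals(configured);

        // Enum comparison: the constant is checked by the compiler, and an
        // unknown value fails fast in valueOf instead of being ignored.
        boolean encodeByEnum = EncodingMode.valueOf(configured) == EncodingMode.ENCODE;

        System.out.println(encodeByString + " " + encodeByEnum); // true true
    }
}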

@@ -43,10 +43,8 @@ import org.apache.nifi.processor.util.StandardValidators;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

@@ -55,7 +53,6 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Function;
import java.util.stream.Collectors;

import static org.apache.commons.lang3.StringUtils.isBlank;

@@ -181,6 +178,16 @@ public class EnforceOrder extends AbstractProcessor {
.expressionLanguageSupported(ExpressionLanguageScope.NONE)
.build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(
GROUP_IDENTIFIER,
ORDER_ATTRIBUTE,
INITIAL_ORDER,
MAX_ORDER,
BATCH_COUNT,
WAIT_TIMEOUT,
INACTIVE_TIMEOUT
);

public static final Relationship REL_SUCCESS = new Relationship.Builder()
.name("success")
.description("A FlowFile with a matching order number will be routed to this relationship.")

@@ -206,37 +213,24 @@ public class EnforceOrder extends AbstractProcessor {
.description("A FlowFile that has an order younger than current, which means arrived too late and skipped, will be routed to this relationship.")
.build();

private final Set<Relationship> relationships;

public EnforceOrder() {
final Set<Relationship> rels = new HashSet<>();
rels.add(REL_SUCCESS);
rels.add(REL_WAIT);
rels.add(REL_OVERTOOK);
rels.add(REL_FAILURE);
rels.add(REL_SKIPPED);
relationships = Collections.unmodifiableSet(rels);
}
private static final Set<Relationship> RELATIONSHIPS = Set.of(
REL_SUCCESS,
REL_WAIT,
REL_OVERTOOK,
REL_FAILURE,
REL_SKIPPED
);

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
final List<PropertyDescriptor> descriptors = new ArrayList<>();
descriptors.add(GROUP_IDENTIFIER);
descriptors.add(ORDER_ATTRIBUTE);
descriptors.add(INITIAL_ORDER);
descriptors.add(MAX_ORDER);
descriptors.add(BATCH_COUNT);
descriptors.add(WAIT_TIMEOUT);
descriptors.add(INACTIVE_TIMEOUT);
return descriptors;
return PROPERTIES;
}

@Override
public Set<Relationship> getRelationships() {
return relationships;
return RELATIONSHIPS;
}

@Override
protected Collection<ValidationResult> customValidate(ValidationContext validationContext) {
final List<ValidationResult> results = new ArrayList<>(super.customValidate(validationContext));

@@ -445,53 +439,55 @@ public class EnforceOrder extends AbstractProcessor {
}

private void transferFlowFiles() {
flowFileGroups.entrySet().stream().filter(entry -> !entry.getValue().isEmpty()).map(entry -> {
// Sort flow files within each group.
final List<FlowFile> groupedFlowFiles = entry.getValue();
groupedFlowFiles.sort(Comparator.comparing(getOrder));
return entry;
}).forEach(entry -> {
// Check current state.
final String groupId = entry.getKey();
final String stateKeyOrder = STATE_TARGET_ORDER.apply(groupId);
final int previousTargetOrder = Integer.parseInt(groupStates.get(stateKeyOrder));
final AtomicInteger targetOrder = new AtomicInteger(previousTargetOrder);
final List<FlowFile> groupedFlowFiles = entry.getValue();
final String maxOrderStr = groupStates.get(STATE_MAX_ORDER.apply(groupId));
flowFileGroups.entrySet().stream()
.filter(entry -> !entry.getValue().isEmpty())
.peek(entry -> {
// Sort flow files within each group.
final List<FlowFile> groupedFlowFiles = entry.getValue();
groupedFlowFiles.sort(Comparator.comparing(getOrder));
})
.forEach(entry -> {
// Check current state.
final String groupId = entry.getKey();
final String stateKeyOrder = STATE_TARGET_ORDER.apply(groupId);
final int previousTargetOrder = Integer.parseInt(groupStates.get(stateKeyOrder));
final AtomicInteger targetOrder = new AtomicInteger(previousTargetOrder);
final List<FlowFile> groupedFlowFiles = entry.getValue();
final String maxOrderStr = groupStates.get(STATE_MAX_ORDER.apply(groupId));

groupedFlowFiles.forEach(f -> {
final Integer order = getOrder.apply(f);
final boolean isMaxOrder = !isBlank(maxOrderStr) && order.equals(Integer.parseInt(maxOrderStr));
groupedFlowFiles.forEach(f -> {
final Integer order = getOrder.apply(f);
final boolean isMaxOrder = !isBlank(maxOrderStr) && order.equals(Integer.parseInt(maxOrderStr));

if (order == targetOrder.get()) {
transferResult(f, REL_SUCCESS, null, null);
if (!isMaxOrder) {
// If max order is specified and this FlowFile has the max order, don't increment target anymore.
targetOrder.incrementAndGet();
if (order == targetOrder.get()) {
transferResult(f, REL_SUCCESS, null, null);
if (!isMaxOrder) {
// If max order is specified and this FlowFile has the max order, don't increment target anymore.
targetOrder.incrementAndGet();
}

} else if (order > targetOrder.get()) {

if (now - Long.parseLong(f.getAttribute(ATTR_STARTED_AT)) > waitTimeoutMillis) {
transferResult(f, REL_OVERTOOK, null, targetOrder.get());
targetOrder.set(isMaxOrder ? order : order + 1);
} else {
transferResult(f, REL_WAIT, null, targetOrder.get());
}

} else {
final String msg = String.format("Skipped, FlowFile order was %d but current target is %d", order, targetOrder.get());
logger.warn("{}. {}", msg, f);
transferResult(f, REL_SKIPPED, msg, targetOrder.get());
}

});

if (previousTargetOrder != targetOrder.get()) {
groupStates.put(stateKeyOrder, String.valueOf(targetOrder.get()));
groupStates.put(STATE_UPDATED_AT.apply(groupId), String.valueOf(now));
}

} else if (order > targetOrder.get()) {

if (now - Long.parseLong(f.getAttribute(ATTR_STARTED_AT)) > waitTimeoutMillis) {
transferResult(f, REL_OVERTOOK, null, targetOrder.get());
targetOrder.set(isMaxOrder ? order : order + 1);
} else {
transferResult(f, REL_WAIT, null, targetOrder.get());
}

} else {
final String msg = String.format("Skipped, FlowFile order was %d but current target is %d", order, targetOrder.get());
logger.warn("{}. {}", msg, f);
transferResult(f, REL_SKIPPED, msg, targetOrder.get());
}

});

if (previousTargetOrder != targetOrder.get()) {
groupStates.put(stateKeyOrder, String.valueOf(targetOrder.get()));
groupStates.put(STATE_UPDATED_AT.apply(groupId), String.valueOf(now));
}
});
});
}

private void transferResult(final FlowFile flowFile, final Relationship result, final String detail, final Integer expectedOrder) {

@@ -533,7 +529,7 @@ public class EnforceOrder extends AbstractProcessor {
final List<String> inactiveGroups = groupStates.keySet().stream()
.filter(k -> k.endsWith(STATE_SUFFIX_UPDATED_AT) && (now - Long.parseLong(groupStates.get(k)) > inactiveTimeout))
.map(k -> k.substring(0, k.length() - STATE_SUFFIX_UPDATED_AT.length()))
.collect(Collectors.toList());
.toList();
inactiveGroups.forEach(groupId -> {
groupStates.remove(STATE_TARGET_ORDER.apply(groupId));
groupStates.remove(STATE_UPDATED_AT.apply(groupId));
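
Two stream idioms change in the EnforceOrder hunks: a map(entry -> { ...; return entry; }) call whose lambda only sorts the group in place becomes peek(...), and collect(Collectors.toList()) becomes Stream#toList. A small runnable sketch of both (data and names are illustrative):

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

class PeekSketch {
    public static void main(String[] args) {
        Map<String, List<Integer>> groups = Map.of("g1", new ArrayList<>(List.of(3, 1, 2)));

        List<String> keys = groups.entrySet().stream()
                .filter(e -> !e.getValue().isEmpty())
                // peek expresses "side effect only, element passes through",
                // which is what the original map(...) was being used for.
                .peek(e -> e.getValue().sort(Integer::compareTo))
                .map(Map.Entry::getKey)
                .toList(); // unmodifiable; replaces collect(Collectors.toList())

        System.out.println(keys + " " + groups.get("g1")); // [g1] [1, 2, 3]
    }
}

Note that Stream#toList returns an unmodifiable list, so the swap is only safe where the caller never mutates the result, as is the case for the inactive-group cleanup above.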

@@ -41,7 +41,6 @@ import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.processor.DataUnit;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;

@@ -52,7 +51,6 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Queue;

@@ -126,6 +124,14 @@ public class EvaluateJsonPath extends AbstractJsonPathProcessor {
.dependsOn(DESTINATION, DESTINATION_ATTRIBUTE)
.build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(
DESTINATION,
RETURN_TYPE,
PATH_NOT_FOUND,
NULL_VALUE_DEFAULT_REPRESENTATION,
MAX_STRING_LENGTH
);

public static final Relationship REL_MATCH = new Relationship.Builder()
.name("matched")
.description("FlowFiles are routed to this relationship when the JsonPath is successfully evaluated and the FlowFile is modified as a result")

@@ -140,8 +146,11 @@ public class EvaluateJsonPath extends AbstractJsonPathProcessor {
+ "FlowFile; for instance, if the FlowFile is not valid JSON")
.build();

private Set<Relationship> relationships;
private List<PropertyDescriptor> properties;
private static final Set<Relationship> RELATIONSHIPS = Set.of(
REL_MATCH,
REL_NO_MATCH,
REL_FAILURE
);

private final ConcurrentMap<String, JsonPath> cachedJsonPathMap = new ConcurrentHashMap<>();

@@ -152,23 +161,6 @@ public class EvaluateJsonPath extends AbstractJsonPathProcessor {
private volatile String nullDefaultValue;
private volatile Configuration jsonPathConfiguration;

@Override
protected void init(final ProcessorInitializationContext context) {
final Set<Relationship> rels = new HashSet<>();
rels.add(REL_MATCH);
rels.add(REL_NO_MATCH);
rels.add(REL_FAILURE);
this.relationships = Collections.unmodifiableSet(rels);

final List<PropertyDescriptor> props = new ArrayList<>();
props.add(DESTINATION);
props.add(RETURN_TYPE);
props.add(PATH_NOT_FOUND);
props.add(NULL_VALUE_DEFAULT_REPRESENTATION);
props.add(MAX_STRING_LENGTH);
this.properties = Collections.unmodifiableList(props);
}

@Override
protected Collection<ValidationResult> customValidate(final ValidationContext context) {
final List<ValidationResult> results = new ArrayList<>(super.customValidate(context));

@@ -194,12 +186,12 @@ public class EvaluateJsonPath extends AbstractJsonPathProcessor {

@Override
public Set<Relationship> getRelationships() {
return relationships;
return RELATIONSHIPS;
}

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return properties;
return PROPERTIES;
}

@Override

@@ -16,33 +16,8 @@
*/
package org.apache.nifi.processors.standard;

import static javax.xml.xpath.XPathConstants.NODESET;
import static javax.xml.xpath.XPathConstants.STRING;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;

import javax.xml.namespace.QName;
import javax.xml.transform.Source;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import javax.xml.xpath.XPathExpression;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;

import net.sf.saxon.xpath.XPathEvaluator;
import net.sf.saxon.xpath.XPathFactoryImpl;
import org.apache.nifi.annotation.behavior.DynamicProperty;
import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;

@@ -65,7 +40,6 @@ import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processors.standard.xml.DocumentTypeAllowedDocumentProvider;

@@ -73,12 +47,33 @@ import org.apache.nifi.xml.processing.ProcessingException;
import org.apache.nifi.xml.processing.parsers.StandardDocumentProvider;
import org.apache.nifi.xml.processing.transform.StandardTransformProvider;
import org.w3c.dom.Document;

import net.sf.saxon.xpath.XPathEvaluator;
import net.sf.saxon.xpath.XPathFactoryImpl;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

import javax.xml.namespace.QName;
import javax.xml.transform.Source;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import javax.xml.xpath.XPathExpression;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;

import static javax.xml.xpath.XPathConstants.NODESET;
import static javax.xml.xpath.XPathConstants.STRING;

@SideEffectFree
@SupportsBatching
@Tags({"XML", "evaluate", "XPath"})

@@ -136,6 +131,12 @@ public class EvaluateXPath extends AbstractProcessor {
.defaultValue("false")
.build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(
DESTINATION,
RETURN_TYPE,
VALIDATE_DTD
);

public static final Relationship REL_MATCH = new Relationship.Builder()
.name("matched")
.description("FlowFiles are routed to this relationship "

@@ -153,26 +154,14 @@ public class EvaluateXPath extends AbstractProcessor {
+ "Type is 'nodeset' and the XPath evaluates to multiple nodes")
.build();

private Set<Relationship> relationships;
private List<PropertyDescriptor> properties;
private static final Set<Relationship> RELATIONSHIPS = Set.of(
REL_MATCH,
REL_NO_MATCH,
REL_FAILURE
);

private final AtomicReference<XPathFactory> factoryRef = new AtomicReference<>();

@Override
protected void init(final ProcessorInitializationContext context) {
final Set<Relationship> relationships = new HashSet<>();
relationships.add(REL_MATCH);
relationships.add(REL_NO_MATCH);
relationships.add(REL_FAILURE);
this.relationships = Collections.unmodifiableSet(relationships);

final List<PropertyDescriptor> properties = new ArrayList<>();
properties.add(DESTINATION);
properties.add(RETURN_TYPE);
properties.add(VALIDATE_DTD);
this.properties = Collections.unmodifiableList(properties);
}

@Override
protected Collection<ValidationResult> customValidate(final ValidationContext context) {
final List<ValidationResult> results = new ArrayList<>(super.customValidate(context));

@@ -198,12 +187,12 @@ public class EvaluateXPath extends AbstractProcessor {

@Override
public Set<Relationship> getRelationships() {
return relationships;
return RELATIONSHIPS;
}

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return properties;
return PROPERTIES;
}

@OnScheduled

@@ -330,7 +319,7 @@ public class EvaluateXPath extends AbstractProcessor {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
final StreamResult streamResult = new StreamResult(baos);
transformProvider.transform(sourceNode, streamResult);
xpathResults.put(entry.getKey(), new String(baos.toByteArray(), StandardCharsets.UTF_8));
xpathResults.put(entry.getKey(), baos.toString(StandardCharsets.UTF_8));
} catch (final ProcessingException e) {
error.set(e);
}
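
The last EvaluateXPath hunk replaces new String(baos.toByteArray(), StandardCharsets.UTF_8) with ByteArrayOutputStream#toString(Charset) (available since Java 10), which decodes the buffer directly instead of first copying it through toByteArray(). A minimal sketch:

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

class BaosSketch {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        baos.write("résultat".getBytes(StandardCharsets.UTF_8));

        // One call, no intermediate byte[] copy, and the charset is explicit.
        String decoded = baos.toString(StandardCharsets.UTF_8);
        System.out.println(decoded); // résultat
    }
}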

@@ -16,24 +16,6 @@
*/
package org.apache.nifi.processors.standard;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import net.sf.saxon.s9api.Processor;
import net.sf.saxon.s9api.SaxonApiException;
import net.sf.saxon.s9api.XQueryCompiler;

@@ -63,7 +45,6 @@ import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;

@@ -73,6 +54,23 @@ import org.apache.nifi.xml.processing.parsers.StandardDocumentProvider;
import org.apache.nifi.xml.processing.transform.StandardTransformProvider;
import org.w3c.dom.Document;

import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;

@SideEffectFree
@SupportsBatching
@Tags({"XML", "evaluate", "XPath", "XQuery"})

@@ -151,6 +149,14 @@ public class EvaluateXQuery extends AbstractProcessor {
.defaultValue("false")
.build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(
DESTINATION,
XML_OUTPUT_METHOD,
XML_OUTPUT_OMIT_XML_DECLARATION,
XML_OUTPUT_INDENT,
VALIDATE_DTD
);

public static final Relationship REL_MATCH = new Relationship.Builder()
.name("matched")
.description("FlowFiles are routed to this relationship when the XQuery is successfully evaluated and the FlowFile "

@@ -169,25 +175,11 @@ public class EvaluateXQuery extends AbstractProcessor {
+ "the FlowFile.")
.build();

private Set<Relationship> relationships;
private List<PropertyDescriptor> properties;

@Override
protected void init(final ProcessorInitializationContext context) {
final Set<Relationship> relationships = new HashSet<>();
relationships.add(REL_MATCH);
relationships.add(REL_NO_MATCH);
relationships.add(REL_FAILURE);
this.relationships = Collections.unmodifiableSet(relationships);

final List<PropertyDescriptor> properties = new ArrayList<>();
properties.add(DESTINATION);
properties.add(XML_OUTPUT_METHOD);
properties.add(XML_OUTPUT_OMIT_XML_DECLARATION);
properties.add(XML_OUTPUT_INDENT);
properties.add(VALIDATE_DTD);
this.properties = Collections.unmodifiableList(properties);
}
private static final Set<Relationship> RELATIONSHIPS = Set.of(
REL_MATCH,
REL_NO_MATCH,
REL_FAILURE
);

@Override
protected Collection<ValidationResult> customValidate(final ValidationContext context) {

@@ -211,12 +203,12 @@ public class EvaluateXQuery extends AbstractProcessor {

@Override
public Set<Relationship> getRelationships() {
return relationships;
return RELATIONSHIPS;
}

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return properties;
return PROPERTIES;
}

@Override

@@ -303,7 +295,7 @@ public class EvaluateXQuery extends AbstractProcessor {
xQueryResults.put(attributeName, value);
}
} else { // if (DESTINATION_CONTENT.equals(destination)){
if (result.size() == 0) {
if (result.isEmpty()) {
logger.info("No XQuery results found {}", flowFile);
session.transfer(flowFile, REL_NO_MATCH);
continue flowFileLoop;

@@ -53,7 +53,6 @@ import java.io.InputStreamReader;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

@@ -156,11 +155,23 @@ public class ExecuteProcess extends AbstractProcessor {
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(
COMMAND,
COMMAND_ARGUMENTS,
BATCH_DURATION,
REDIRECT_ERROR_STREAM,
WORKING_DIR,
ARG_DELIMITER,
MIME_TYPE
);

public static final Relationship REL_SUCCESS = new Relationship.Builder()
.name("success")
.description("All created FlowFiles are routed to this relationship")
.build();

private static final Set<Relationship> RELATIONSHIPS = Set.of(REL_SUCCESS);

private volatile Process externalProcess;

private volatile ExecutorService executor;

@@ -170,20 +181,12 @@ public class ExecuteProcess extends AbstractProcessor {

@Override
public Set<Relationship> getRelationships() {
return Collections.singleton(REL_SUCCESS);
return RELATIONSHIPS;
}

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
final List<PropertyDescriptor> properties = new ArrayList<>();
properties.add(COMMAND);
properties.add(COMMAND_ARGUMENTS);
properties.add(BATCH_DURATION);
properties.add(REDIRECT_ERROR_STREAM);
properties.add(WORKING_DIR);
properties.add(ARG_DELIMITER);
properties.add(MIME_TYPE);
return properties;
return PROPERTIES;
}

@Override

@@ -33,15 +33,11 @@ import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.processors.standard.sql.DefaultAvroSqlWriter;
import org.apache.nifi.processors.standard.sql.SqlWriter;
import org.apache.nifi.util.db.JdbcCommon;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

@@ -147,27 +143,26 @@ public class ExecuteSQL extends AbstractExecuteSQL {
.build();

public ExecuteSQL() {
final Set<Relationship> r = new HashSet<>();
r.add(REL_SUCCESS);
r.add(REL_FAILURE);
relationships = Collections.unmodifiableSet(r);

final List<PropertyDescriptor> pds = new ArrayList<>();
pds.add(DBCP_SERVICE);
pds.add(SQL_PRE_QUERY);
pds.add(SQL_SELECT_QUERY);
pds.add(SQL_POST_QUERY);
pds.add(QUERY_TIMEOUT);
pds.add(NORMALIZE_NAMES_FOR_AVRO);
pds.add(USE_AVRO_LOGICAL_TYPES);
pds.add(COMPRESSION_FORMAT);
pds.add(DEFAULT_PRECISION);
pds.add(DEFAULT_SCALE);
pds.add(MAX_ROWS_PER_FLOW_FILE);
pds.add(OUTPUT_BATCH_SIZE);
pds.add(FETCH_SIZE);
pds.add(AUTO_COMMIT);
propDescriptors = Collections.unmodifiableList(pds);
relationships = Set.of(
REL_SUCCESS,
REL_FAILURE
);
propDescriptors = List.of(
DBCP_SERVICE,
SQL_PRE_QUERY,
SQL_SELECT_QUERY,
SQL_POST_QUERY,
QUERY_TIMEOUT,
NORMALIZE_NAMES_FOR_AVRO,
USE_AVRO_LOGICAL_TYPES,
COMPRESSION_FORMAT,
DEFAULT_PRECISION,
DEFAULT_SCALE,
MAX_ROWS_PER_FLOW_FILE,
OUTPUT_BATCH_SIZE,
FETCH_SIZE,
AUTO_COMMIT
);
}

@Override

@@ -39,9 +39,7 @@ import org.apache.nifi.processors.standard.sql.SqlWriter;
import org.apache.nifi.serialization.RecordSetWriterFactory;
import org.apache.nifi.util.db.JdbcCommon;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

@@ -152,28 +150,31 @@ public class ExecuteSQLRecord extends AbstractExecuteSQL {
.required(true)
.build();

public ExecuteSQLRecord() {
final Set<Relationship> r = new HashSet<>();
r.add(REL_SUCCESS);
r.add(REL_FAILURE);
relationships = Collections.unmodifiableSet(r);
private static final List<PropertyDescriptor> PROPERTIES = List.of(
DBCP_SERVICE,
SQL_PRE_QUERY,
SQL_SELECT_QUERY,
SQL_POST_QUERY,
QUERY_TIMEOUT,
RECORD_WRITER_FACTORY,
NORMALIZE_NAMES,
USE_AVRO_LOGICAL_TYPES,
DEFAULT_PRECISION,
DEFAULT_SCALE,
MAX_ROWS_PER_FLOW_FILE,
OUTPUT_BATCH_SIZE,
FETCH_SIZE,
AUTO_COMMIT
);

final List<PropertyDescriptor> pds = new ArrayList<>();
pds.add(DBCP_SERVICE);
pds.add(SQL_PRE_QUERY);
pds.add(SQL_SELECT_QUERY);
pds.add(SQL_POST_QUERY);
pds.add(QUERY_TIMEOUT);
pds.add(RECORD_WRITER_FACTORY);
pds.add(NORMALIZE_NAMES);
pds.add(USE_AVRO_LOGICAL_TYPES);
pds.add(DEFAULT_PRECISION);
pds.add(DEFAULT_SCALE);
pds.add(MAX_ROWS_PER_FLOW_FILE);
pds.add(OUTPUT_BATCH_SIZE);
pds.add(FETCH_SIZE);
pds.add(AUTO_COMMIT);
propDescriptors = Collections.unmodifiableList(pds);
private static final Set<Relationship> RELATIONSHIPS = Set.of(
REL_SUCCESS,
REL_FAILURE
);

public ExecuteSQLRecord() {
relationships = RELATIONSHIPS;
propDescriptors = PROPERTIES;
}

@Override
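
ExecuteSQL and ExecuteSQLRecord follow a slightly different shape from the other processors: their relationships and propDescriptors fields are declared by the abstract parent (AbstractExecuteSQL), so the constructors remain but now just assign shared static constants. A compact sketch of the shape, with String standing in for the NiFi descriptor types and the class names shortened for illustration:

import java.util.List;

abstract class AbstractSqlSketch {
    protected List<String> propDescriptors;
}

class SqlRecordSketch extends AbstractSqlSketch {
    // Built once per class; every instance shares the same unmodifiable list.
    private static final List<String> PROPERTIES = List.of("DBCP Service", "SQL select query");

    SqlRecordSketch() {
        propDescriptors = PROPERTIES;
    }
}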

@@ -20,16 +20,15 @@ import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.annotation.behavior.DynamicProperties;
import org.apache.nifi.annotation.behavior.Restricted;
import org.apache.nifi.annotation.behavior.Restriction;

import org.apache.nifi.annotation.behavior.DynamicProperty;
import org.apache.nifi.annotation.behavior.WritesAttributes;
import org.apache.nifi.annotation.behavior.WritesAttribute;
import org.apache.nifi.annotation.behavior.SupportsSensitiveDynamicProperties;
import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
import org.apache.nifi.annotation.behavior.Restricted;
import org.apache.nifi.annotation.behavior.Restriction;
import org.apache.nifi.annotation.behavior.SupportsBatching;
import org.apache.nifi.annotation.behavior.SupportsSensitiveDynamicProperties;
import org.apache.nifi.annotation.behavior.WritesAttribute;
import org.apache.nifi.annotation.behavior.WritesAttributes;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.components.AllowableValue;

@@ -66,9 +65,7 @@ import java.io.OutputStream;
import java.lang.ProcessBuilder.Redirect;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

@@ -188,8 +185,12 @@ public class ExecuteStreamCommand extends AbstractProcessor {
.build();
private final AtomicReference<Set<Relationship>> relationships = new AtomicReference<>();

private final static Set<Relationship> OUTPUT_STREAM_RELATIONSHIP_SET;
private final static Set<Relationship> ATTRIBUTE_RELATIONSHIP_SET;
private final static Set<Relationship> OUTPUT_STREAM_RELATIONSHIP_SET = Set.of(
OUTPUT_STREAM_RELATIONSHIP,
ORIGINAL_RELATIONSHIP,
NONZERO_STATUS_RELATIONSHIP
);
private final static Set<Relationship> ATTRIBUTE_RELATIONSHIP_SET = Set.of(ORIGINAL_RELATIONSHIP);

private static final Pattern COMMAND_ARGUMENT_PATTERN = Pattern.compile("command\\.argument\\.(?<commandIndex>[0-9]+)$");

@@ -286,33 +287,20 @@ public class ExecuteStreamCommand extends AbstractProcessor {
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.build();

private static final List<PropertyDescriptor> PROPERTIES;
private static final List<PropertyDescriptor> PROPERTIES = List.of(
WORKING_DIR,
EXECUTION_COMMAND,
ARGUMENTS_STRATEGY,
EXECUTION_ARGUMENTS,
ARG_DELIMITER,
IGNORE_STDIN,
PUT_OUTPUT_IN_ATTRIBUTE,
PUT_ATTRIBUTE_MAX_LENGTH,
MIME_TYPE
);

private static final String MASKED_ARGUMENT = "********";

static {
List<PropertyDescriptor> props = new ArrayList<>();
props.add(WORKING_DIR);
props.add(EXECUTION_COMMAND);
props.add(ARGUMENTS_STRATEGY);
props.add(EXECUTION_ARGUMENTS);
props.add(ARG_DELIMITER);
props.add(IGNORE_STDIN);
props.add(PUT_OUTPUT_IN_ATTRIBUTE);
props.add(PUT_ATTRIBUTE_MAX_LENGTH);
props.add(MIME_TYPE);
PROPERTIES = Collections.unmodifiableList(props);

Set<Relationship> outputStreamRelationships = new HashSet<>();
outputStreamRelationships.add(OUTPUT_STREAM_RELATIONSHIP);
outputStreamRelationships.add(ORIGINAL_RELATIONSHIP);
outputStreamRelationships.add(NONZERO_STATUS_RELATIONSHIP);
OUTPUT_STREAM_RELATIONSHIP_SET = Collections.unmodifiableSet(outputStreamRelationships);

Set<Relationship> attributeRelationships = new HashSet<>();
attributeRelationships.add(ORIGINAL_RELATIONSHIP);
ATTRIBUTE_RELATIONSHIP_SET = Collections.unmodifiableSet(attributeRelationships);
}

private ComponentLog logger;

@Override

@@ -56,9 +56,7 @@ import java.io.InputStream;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

@@ -156,6 +154,16 @@ public class ExtractGrok extends AbstractProcessor {
.defaultValue("false")
.build();

private final static List<PropertyDescriptor> PROPERTIES = List.of(
GROK_EXPRESSION,
GROK_PATTERNS,
DESTINATION,
CHARACTER_SET,
MAX_BUFFER_SIZE,
NAMED_CAPTURES_ONLY,
KEEP_EMPTY_CAPTURES
);

public static final Relationship REL_MATCH = new Relationship.Builder()
.name("matched")
.description("FlowFiles are routed to this relationship when the Grok Expression is successfully evaluated and the FlowFile is modified as a result")

@@ -166,39 +174,24 @@ public class ExtractGrok extends AbstractProcessor {
.description("FlowFiles are routed to this relationship when no provided Grok Expression matches the content of the FlowFile")
.build();

private final static List<PropertyDescriptor> descriptors;
private final static Set<Relationship> relationships;
private final static Set<Relationship> RELATIONSHIPS = Set.of(
REL_MATCH,
REL_NO_MATCH
);

private volatile Grok grok;
private final BlockingQueue<byte[]> bufferQueue = new LinkedBlockingQueue<>();

private final AtomicBoolean keepEmptyCaputures = new AtomicBoolean(true);

static {
final Set<Relationship> _relationships = new HashSet<>();
_relationships.add(REL_MATCH);
_relationships.add(REL_NO_MATCH);
relationships = Collections.unmodifiableSet(_relationships);

final List<PropertyDescriptor> _descriptors = new ArrayList<>();
_descriptors.add(GROK_EXPRESSION);
_descriptors.add(GROK_PATTERNS);
_descriptors.add(DESTINATION);
_descriptors.add(CHARACTER_SET);
_descriptors.add(MAX_BUFFER_SIZE);
_descriptors.add(NAMED_CAPTURES_ONLY);
_descriptors.add(KEEP_EMPTY_CAPTURES);
descriptors = Collections.unmodifiableList(_descriptors);
}

@Override
public Set<Relationship> getRelationships() {
return relationships;
return RELATIONSHIPS;
}

@Override
public final List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return descriptors;
return PROPERTIES;
}

@OnStopped

@@ -42,8 +42,6 @@ import org.apache.nifi.serialization.RecordReaderFactory;
import org.apache.nifi.serialization.record.RecordSchema;

import java.io.InputStream;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

@@ -79,6 +77,11 @@ public class ExtractRecordSchema extends AbstractProcessor {
.required(true)
.build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(
RECORD_READER,
SCHEMA_CACHE_SIZE
);

static final Relationship REL_SUCCESS = new Relationship.Builder()
.name("success")
.description("FlowFiles whose record schemas are successfully extracted will be routed to this relationship")

@@ -89,21 +92,21 @@ public class ExtractRecordSchema extends AbstractProcessor {
+ "the FlowFile will be routed to this relationship")
.build();

static final List<PropertyDescriptor> properties = Arrays.asList(RECORD_READER, SCHEMA_CACHE_SIZE);
private static final Set<Relationship> RELATIONSHIPS = Set.of(
REL_SUCCESS,
REL_FAILURE
);

private LoadingCache<RecordSchema, String> avroSchemaTextCache;

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return properties;
return PROPERTIES;
}

@Override
public Set<Relationship> getRelationships() {
final Set<Relationship> relationships = new HashSet<>();
relationships.add(REL_SUCCESS);
relationships.add(REL_FAILURE);
return relationships;
return RELATIONSHIPS;
}

@OnScheduled

@@ -231,6 +231,24 @@ public class ExtractText extends AbstractProcessor {
.defaultValue("false")
.build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(
CHARACTER_SET,
MAX_BUFFER_SIZE,
MAX_CAPTURE_GROUP_LENGTH,
CANON_EQ,
CASE_INSENSITIVE,
COMMENTS,
DOTALL,
LITERAL,
MULTILINE,
UNICODE_CASE,
UNICODE_CHARACTER_CLASS,
UNIX_LINES,
INCLUDE_CAPTURE_GROUP_ZERO,
ENABLE_REPEATING_CAPTURE_GROUP,
ENABLE_NAMED_GROUPS
);

public static final Relationship REL_MATCH = new Relationship.Builder()
.name("matched")
.description("FlowFiles are routed to this relationship when the Regular Expression is successfully evaluated and the FlowFile is modified as a result")

@@ -241,24 +259,10 @@ public class ExtractText extends AbstractProcessor {
.description("FlowFiles are routed to this relationship when no provided Regular Expression matches the content of the FlowFile")
.build();

private final Set<Relationship> relationships = Set.of(REL_MATCH,
REL_NO_MATCH);

private final List<PropertyDescriptor> properties = List.of(CHARACTER_SET,
MAX_BUFFER_SIZE,
MAX_CAPTURE_GROUP_LENGTH,
CANON_EQ,
CASE_INSENSITIVE,
COMMENTS,
DOTALL,
LITERAL,
MULTILINE,
UNICODE_CASE,
UNICODE_CHARACTER_CLASS,
UNIX_LINES,
INCLUDE_CAPTURE_GROUP_ZERO,
ENABLE_REPEATING_CAPTURE_GROUP,
ENABLE_NAMED_GROUPS);
private static final Set<Relationship> RELATIONSHIPS = Set.of(
REL_MATCH,
REL_NO_MATCH
);

private final BlockingQueue<byte[]> bufferQueue = new LinkedBlockingQueue<>();
private final AtomicReference<Map<String, Pattern>> compiledPattersMapRef = new AtomicReference<>();

@@ -266,12 +270,12 @@ public class ExtractText extends AbstractProcessor {

@Override
public Set<Relationship> getRelationships() {
return relationships;
return RELATIONSHIPS;
}

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return properties;
return PROPERTIES;
}

@Override

@@ -481,7 +485,7 @@ public class ExtractText extends AbstractProcessor {
}

int getCompileFlags(ProcessContext context) {
int flags = (context.getProperty(UNIX_LINES).asBoolean() ? Pattern.UNIX_LINES : 0)
return (context.getProperty(UNIX_LINES).asBoolean() ? Pattern.UNIX_LINES : 0)
| (context.getProperty(CASE_INSENSITIVE).asBoolean() ? Pattern.CASE_INSENSITIVE : 0)
| (context.getProperty(COMMENTS).asBoolean() ? Pattern.COMMENTS : 0)
| (context.getProperty(MULTILINE).asBoolean() ? Pattern.MULTILINE : 0)

@@ -490,6 +494,5 @@ public class ExtractText extends AbstractProcessor {
| (context.getProperty(UNICODE_CASE).asBoolean() ? Pattern.UNICODE_CASE : 0)
| (context.getProperty(CANON_EQ).asBoolean() ? Pattern.CANON_EQ : 0)
| (context.getProperty(UNICODE_CHARACTER_CLASS).asBoolean() ? Pattern.UNICODE_CHARACTER_CLASS : 0);
return flags;
}
}
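
The final ExtractText hunk drops the local flags variable and returns the OR-combined flag word directly. Pattern compile flags are bit masks, so independent options combine with | and the result feeds straight into Pattern.compile. A runnable sketch of the same calculation reduced to two options (method and class names are illustrative):

import java.util.regex.Pattern;

class CompileFlagsSketch {
    static int compileFlags(boolean caseInsensitive, boolean multiline) {
        // Each enabled option contributes its bit; disabled options contribute 0.
        return (caseInsensitive ? Pattern.CASE_INSENSITIVE : 0)
                | (multiline ? Pattern.MULTILINE : 0);
    }

    public static void main(String[] args) {
        int flags = compileFlags(true, false);
        System.out.println(Pattern.compile("abc", flags).matcher("ABC").matches()); // true
    }
}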

@@ -34,8 +34,8 @@ import org.apache.nifi.distributed.cache.client.Serializer;
import org.apache.nifi.distributed.cache.client.exception.DeserializationException;
import org.apache.nifi.distributed.cache.client.exception.SerializationException;
import org.apache.nifi.expression.AttributeExpression;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.expression.AttributeExpression.ResultType;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.processor.AbstractProcessor;

@@ -51,13 +51,11 @@ import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

@SupportsBatching
@Tags({"map", "cache", "fetch", "distributed"})

@@ -75,14 +73,14 @@ import java.util.stream.Collectors;
"org.apache.nifi.processors.standard.PutDistributedMapCache"})
public class FetchDistributedMapCache extends AbstractProcessor {

public static final PropertyDescriptor PROP_DISTRIBUTED_CACHE_SERVICE = new PropertyDescriptor.Builder()
public static final PropertyDescriptor DISTRIBUTED_CACHE_SERVICE = new PropertyDescriptor.Builder()
.name("Distributed Cache Service")
.description("The Controller Service that is used to get the cached values.")
.required(true)
.identifiesControllerService(DistributedMapCacheClient.class)
.build();

public static final PropertyDescriptor PROP_CACHE_ENTRY_IDENTIFIER = new PropertyDescriptor.Builder()
public static final PropertyDescriptor CACHE_ENTRY_IDENTIFIER = new PropertyDescriptor.Builder()
.name("Cache Entry Identifier")
.description("A comma-delimited list of FlowFile attributes, or the results of Attribute Expression Language statements, which will be evaluated "
+ "against a FlowFile in order to determine the value(s) used to identify duplicates; it is these values that are cached. NOTE: Only a single "

@@ -94,7 +92,7 @@ public class FetchDistributedMapCache extends AbstractProcessor {
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
.build();

public static final PropertyDescriptor PROP_PUT_CACHE_VALUE_IN_ATTRIBUTE = new PropertyDescriptor.Builder()
public static final PropertyDescriptor PUT_CACHE_VALUE_IN_ATTRIBUTE = new PropertyDescriptor.Builder()
.name("Put Cache Value In Attribute")
.description("If set, the cache value received will be put into an attribute of the FlowFile instead of a the content of the"
+ "FlowFile. The attribute key to put to is determined by evaluating value of this property. If multiple Cache Entry Identifiers are selected, "

@@ -103,7 +101,7 @@ public class FetchDistributedMapCache extends AbstractProcessor {
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
.build();

public static final PropertyDescriptor PROP_PUT_ATTRIBUTE_MAX_LENGTH = new PropertyDescriptor.Builder()
public static final PropertyDescriptor PUT_ATTRIBUTE_MAX_LENGTH = new PropertyDescriptor.Builder()
.name("Max Length To Put In Attribute")
.description("If routing the cache value to an attribute of the FlowFile (by setting the \"Put Cache Value in attribute\" "
+ "property), the number of characters put to the attribute value will be at most this amount. This is important because "

@@ -113,7 +111,7 @@ public class FetchDistributedMapCache extends AbstractProcessor {
.defaultValue("256")
.build();

public static final PropertyDescriptor PROP_CHARACTER_SET = new PropertyDescriptor.Builder()
public static final PropertyDescriptor CHARACTER_SET = new PropertyDescriptor.Builder()
.name("Character Set")
.description("The Character Set in which the cached value is encoded. This will only be used when routing to an attribute.")
.required(false)

@@ -121,6 +119,14 @@ public class FetchDistributedMapCache extends AbstractProcessor {
.defaultValue("UTF-8")
.build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(
CACHE_ENTRY_IDENTIFIER,
DISTRIBUTED_CACHE_SERVICE,
PUT_CACHE_VALUE_IN_ATTRIBUTE,
PUT_ATTRIBUTE_MAX_LENGTH,
CHARACTER_SET
);

public static final Relationship REL_SUCCESS = new Relationship.Builder()
.name("success")
.description("If the cache was successfully communicated with it will be routed to this relationship")

@@ -133,40 +139,31 @@ public class FetchDistributedMapCache extends AbstractProcessor {
.name("failure")
.description("If unable to communicate with the cache or if the cache entry is evaluated to be blank, the FlowFile will be penalized and routed to this relationship")
.build();
private final Set<Relationship> relationships;

private static final Set<Relationship> RELATIONSHIPS = Set.of(
REL_SUCCESS,
REL_NOT_FOUND,
REL_FAILURE
);

private final Serializer<String> keySerializer = new StringSerializer();
private final Deserializer<byte[]> valueDeserializer = new CacheValueDeserializer();

public FetchDistributedMapCache() {
final Set<Relationship> rels = new HashSet<>();
rels.add(REL_SUCCESS);
rels.add(REL_NOT_FOUND);
rels.add(REL_FAILURE);
relationships = Collections.unmodifiableSet(rels);
}

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
final List<PropertyDescriptor> descriptors = new ArrayList<>();
descriptors.add(PROP_CACHE_ENTRY_IDENTIFIER);
descriptors.add(PROP_DISTRIBUTED_CACHE_SERVICE);
descriptors.add(PROP_PUT_CACHE_VALUE_IN_ATTRIBUTE);
descriptors.add(PROP_PUT_ATTRIBUTE_MAX_LENGTH);
descriptors.add(PROP_CHARACTER_SET);
return descriptors;
return PROPERTIES;
}

@Override
public Set<Relationship> getRelationships() {
return relationships;
return RELATIONSHIPS;
}

@Override
protected Collection<ValidationResult> customValidate(ValidationContext validationContext) {
List<ValidationResult> results = new ArrayList<>(super.customValidate(validationContext));

PropertyValue cacheEntryIdentifier = validationContext.getProperty(PROP_CACHE_ENTRY_IDENTIFIER);
PropertyValue cacheEntryIdentifier = validationContext.getProperty(CACHE_ENTRY_IDENTIFIER);
boolean elPresent = false;
try {
elPresent = cacheEntryIdentifier.isExpressionLanguagePresent();

@@ -180,7 +177,7 @@ public class FetchDistributedMapCache extends AbstractProcessor {
// or a single EL statement with commas inside it but that evaluates to a single item.
results.add(new ValidationResult.Builder().valid(true).explanation("Contains Expression Language").build());
} else {
if (!validationContext.getProperty(FetchDistributedMapCache.PROP_PUT_CACHE_VALUE_IN_ATTRIBUTE).isSet()) {
if (!validationContext.getProperty(FetchDistributedMapCache.PUT_CACHE_VALUE_IN_ATTRIBUTE).isSet()) {
String identifierString = cacheEntryIdentifier.getValue();
if (identifierString.contains(",")) {
results.add(new ValidationResult.Builder().valid(false)

@@ -199,7 +196,7 @@ public class FetchDistributedMapCache extends AbstractProcessor {
}

final ComponentLog logger = getLogger();
final String cacheKey = context.getProperty(PROP_CACHE_ENTRY_IDENTIFIER).evaluateAttributeExpressions(flowFile).getValue();
final String cacheKey = context.getProperty(CACHE_ENTRY_IDENTIFIER).evaluateAttributeExpressions(flowFile).getValue();
|
||||
// This block retains the previous behavior when only one Cache Entry Identifier was allowed, so as not to change the expected error message
|
||||
if (StringUtils.isBlank(cacheKey)) {
|
||||
logger.error("FlowFile {} has no attribute for given Cache Entry Identifier", flowFile);
|
||||
|
@ -207,7 +204,7 @@ public class FetchDistributedMapCache extends AbstractProcessor {
|
|||
session.transfer(flowFile, REL_FAILURE);
|
||||
return;
|
||||
}
|
||||
List<String> cacheKeys = Arrays.stream(cacheKey.split(",")).filter(path -> !StringUtils.isEmpty(path)).map(String::trim).collect(Collectors.toList());
|
||||
List<String> cacheKeys = Arrays.stream(cacheKey.split(",")).filter(path -> !StringUtils.isEmpty(path)).map(String::trim).toList();
|
||||
for (int i = 0; i < cacheKeys.size(); i++) {
|
||||
if (StringUtils.isBlank(cacheKeys.get(i))) {
|
||||
// Log first missing identifier, route to failure, and return
|
||||
|
@ -218,14 +215,14 @@ public class FetchDistributedMapCache extends AbstractProcessor {
|
|||
}
|
||||
}
|
||||
|
||||
final DistributedMapCacheClient cache = context.getProperty(PROP_DISTRIBUTED_CACHE_SERVICE).asControllerService(DistributedMapCacheClient.class);
|
||||
final DistributedMapCacheClient cache = context.getProperty(DISTRIBUTED_CACHE_SERVICE).asControllerService(DistributedMapCacheClient.class);
|
||||
|
||||
try {
|
||||
final Map<String, byte[]> cacheValues;
|
||||
final boolean singleKey = cacheKeys.size() == 1;
|
||||
if (singleKey) {
|
||||
cacheValues = new HashMap<>(1);
|
||||
cacheValues.put(cacheKeys.get(0), cache.get(cacheKey, keySerializer, valueDeserializer));
|
||||
cacheValues.put(cacheKeys.getFirst(), cache.get(cacheKey, keySerializer, valueDeserializer));
|
||||
} else {
|
||||
cacheValues = cache.subMap(new HashSet<>(cacheKeys), keySerializer, valueDeserializer);
|
||||
}
|
||||
|
@ -238,16 +235,16 @@ public class FetchDistributedMapCache extends AbstractProcessor {
|
|||
notFound = true;
|
||||
break;
|
||||
} else {
|
||||
boolean putInAttribute = context.getProperty(PROP_PUT_CACHE_VALUE_IN_ATTRIBUTE).isSet();
|
||||
boolean putInAttribute = context.getProperty(PUT_CACHE_VALUE_IN_ATTRIBUTE).isSet();
|
||||
if (putInAttribute) {
|
||||
String attributeName = context.getProperty(PROP_PUT_CACHE_VALUE_IN_ATTRIBUTE).evaluateAttributeExpressions(flowFile).getValue();
|
||||
String attributeName = context.getProperty(PUT_CACHE_VALUE_IN_ATTRIBUTE).evaluateAttributeExpressions(flowFile).getValue();
|
||||
if (!singleKey) {
|
||||
// Append key to attribute name if multiple keys
|
||||
attributeName += "." + cacheValueEntry.getKey();
|
||||
}
|
||||
String attributeValue = new String(cacheValue, context.getProperty(PROP_CHARACTER_SET).getValue());
|
||||
String attributeValue = new String(cacheValue, context.getProperty(CHARACTER_SET).getValue());
|
||||
|
||||
int maxLength = context.getProperty(PROP_PUT_ATTRIBUTE_MAX_LENGTH).asInteger();
|
||||
int maxLength = context.getProperty(PUT_ATTRIBUTE_MAX_LENGTH).asInteger();
|
||||
if (maxLength < attributeValue.length()) {
|
||||
attributeValue = attributeValue.substring(0, maxLength);
|
||||
}
|
||||
|
|
|
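The FetchDistributedMapCache changes above are typical of the whole commit: hand-built unmodifiable collections give way to the JDK's immutable factory methods, collect(Collectors.toList()) becomes Stream.toList() (Java 16), and get(0) becomes the SequencedCollection method getFirst() (Java 21). A minimal standalone sketch of those APIs, not taken from the NiFi sources:

import java.util.Arrays;
import java.util.List;
import java.util.Set;

class CollectionFactoryDemo {
    public static void main(String[] args) {
        // Java 9 factory methods: immutable, compact, no wrapper needed
        Set<String> relationships = Set.of("success", "not-found", "failure");

        // Stream.toList() (Java 16) also yields an unmodifiable List, so the
        // old collect(Collectors.toList()) plus wrapping step is unnecessary
        List<String> keys = Arrays.stream("a, b ,c".split(","))
                .map(String::trim)
                .toList();

        // SequencedCollection (Java 21): getFirst() replaces get(0)
        System.out.println(keys.getFirst());      // prints "a"
        System.out.println(relationships.size()); // prints 3

        // Mutation attempts fail fast
        try {
            keys.add("d");
        } catch (UnsupportedOperationException e) {
            System.out.println("immutable, as expected");
        }
    }
}

Note that a processor targeting these APIs implicitly raises the minimum JDK: getFirst() in particular requires Java 21.
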
@@ -17,9 +17,6 @@

package org.apache.nifi.processors.standard;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
import org.apache.nifi.annotation.behavior.WritesAttribute;
@@ -34,8 +31,12 @@ import org.apache.nifi.components.ValidationContext;
import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.util.file.transfer.FetchFileTransfer;
import org.apache.nifi.processors.standard.util.FTPTransfer;
import org.apache.nifi.processor.util.file.transfer.FileTransfer;
import org.apache.nifi.processors.standard.util.FTPTransfer;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

// Note that we do not use @SupportsBatching annotation. This processor cannot support batching because it must ensure that session commits happen before remote files are deleted.

@@ -83,34 +84,37 @@ import org.apache.nifi.processor.util.file.transfer.FileTransfer;
)
public class FetchFTP extends FetchFileTransfer {

private static final PropertyDescriptor PORT =
new PropertyDescriptor.Builder().fromPropertyDescriptor(UNDEFAULTED_PORT).defaultValue("21").build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(
HOSTNAME,
PORT,
USERNAME,
FTPTransfer.PASSWORD,
REMOTE_FILENAME,
COMPLETION_STRATEGY,
MOVE_DESTINATION_DIR,
MOVE_CREATE_DIRECTORY,
FTPTransfer.CONNECTION_TIMEOUT,
FTPTransfer.DATA_TIMEOUT,
FTPTransfer.USE_COMPRESSION,
FTPTransfer.CONNECTION_MODE,
FTPTransfer.TRANSFER_MODE,
FTPTransfer.PROXY_CONFIGURATION_SERVICE,
FTPTransfer.PROXY_TYPE,
FTPTransfer.PROXY_HOST,
FTPTransfer.PROXY_PORT,
FTPTransfer.HTTP_PROXY_USERNAME,
FTPTransfer.HTTP_PROXY_PASSWORD,
FTPTransfer.BUFFER_SIZE,
FILE_NOT_FOUND_LOG_LEVEL,
FTPTransfer.UTF8_ENCODING
);

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
final PropertyDescriptor port = new PropertyDescriptor.Builder().fromPropertyDescriptor(UNDEFAULTED_PORT).defaultValue("21").build();

final List<PropertyDescriptor> properties = new ArrayList<>();
properties.add(HOSTNAME);
properties.add(port);
properties.add(USERNAME);
properties.add(FTPTransfer.PASSWORD);
properties.add(REMOTE_FILENAME);
properties.add(COMPLETION_STRATEGY);
properties.add(MOVE_DESTINATION_DIR);
properties.add(MOVE_CREATE_DIRECTORY);
properties.add(FTPTransfer.CONNECTION_TIMEOUT);
properties.add(FTPTransfer.DATA_TIMEOUT);
properties.add(FTPTransfer.USE_COMPRESSION);
properties.add(FTPTransfer.CONNECTION_MODE);
properties.add(FTPTransfer.TRANSFER_MODE);
properties.add(FTPTransfer.PROXY_CONFIGURATION_SERVICE);
properties.add(FTPTransfer.PROXY_TYPE);
properties.add(FTPTransfer.PROXY_HOST);
properties.add(FTPTransfer.PROXY_PORT);
properties.add(FTPTransfer.HTTP_PROXY_USERNAME);
properties.add(FTPTransfer.HTTP_PROXY_PASSWORD);
properties.add(FTPTransfer.BUFFER_SIZE);
properties.add(FILE_NOT_FOUND_LOG_LEVEL);
properties.add(FTPTransfer.UTF8_ENCODING);
return properties;
return PROPERTIES;
}

@Override

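FetchFTP shows the other recurring move in this commit: descriptor lists that used to be rebuilt inside getSupportedPropertyDescriptors() on every invocation are now a single static final constant created once at class load and returned directly. A self-contained sketch of the before/after shape, with plain Strings standing in for NiFi's PropertyDescriptor type:

import java.util.ArrayList;
import java.util.List;

class DescriptorListDemo {
    private static final String HOSTNAME = "Hostname";
    private static final String PORT = "Port";

    // Before: a fresh ArrayList is allocated on every call
    static List<String> descriptorsBefore() {
        final List<String> properties = new ArrayList<>();
        properties.add(HOSTNAME);
        properties.add(PORT);
        return properties;
    }

    // After: one immutable list, created at class load and shared
    private static final List<String> PROPERTIES = List.of(HOSTNAME, PORT);

    static List<String> descriptorsAfter() {
        return PROPERTIES;
    }

    public static void main(String[] args) {
        System.out.println(descriptorsBefore().equals(descriptorsAfter())); // true
        System.out.println(descriptorsAfter() == descriptorsAfter());       // true: same instance
    }
}

Because the constant is immutable, returning the same instance to every caller is safe even under concurrent access; the mutable-list version relied on each caller getting a private copy.
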
@@ -53,7 +53,6 @@ import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.UUID;
@@ -177,6 +176,15 @@ public class FetchFile extends AbstractProcessor {
.required(true)
.build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(
FILENAME,
COMPLETION_STRATEGY,
MOVE_DESTINATION_DIR,
CONFLICT_STRATEGY,
FILE_NOT_FOUND_LOG_LEVEL,
PERM_DENIED_LOG_LEVEL
);

static final Relationship REL_SUCCESS = new Relationship.Builder()
.name("success")
.description("Any FlowFile that is successfully fetched from the file system will be transferred to this Relationship.")
@@ -195,26 +203,21 @@ public class FetchFile extends AbstractProcessor {
"Any FlowFile that could not be fetched from the file system for any reason other than insufficient permissions or the file not existing will be transferred to this Relationship.")
.build();

private static final Set<Relationship> RELATIONSHIPS = Set.of(
REL_SUCCESS,
REL_NOT_FOUND,
REL_PERMISSION_DENIED,
REL_FAILURE
);

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
final List<PropertyDescriptor> properties = new ArrayList<>();
properties.add(FILENAME);
properties.add(COMPLETION_STRATEGY);
properties.add(MOVE_DESTINATION_DIR);
properties.add(CONFLICT_STRATEGY);
properties.add(FILE_NOT_FOUND_LOG_LEVEL);
properties.add(PERM_DENIED_LOG_LEVEL);
return properties;
return PROPERTIES;
}

@Override
public Set<Relationship> getRelationships() {
final Set<Relationship> relationships = new HashSet<>();
relationships.add(REL_SUCCESS);
relationships.add(REL_NOT_FOUND);
relationships.add(REL_PERMISSION_DENIED);
relationships.add(REL_FAILURE);
return relationships;
return RELATIONSHIPS;
}

@Override

@@ -17,9 +17,6 @@

package org.apache.nifi.processors.standard;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
import org.apache.nifi.annotation.behavior.WritesAttribute;
@@ -34,10 +31,14 @@ import org.apache.nifi.components.ValidationContext;
import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.util.file.transfer.FetchFileTransfer;
import org.apache.nifi.processors.standard.util.FTPTransfer;
import org.apache.nifi.processor.util.file.transfer.FileTransfer;
import org.apache.nifi.processors.standard.util.FTPTransfer;
import org.apache.nifi.processors.standard.util.SFTPTransfer;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

// Note that we do not use @SupportsBatching annotation. This processor cannot support batching because it must ensure that session commits happen before remote files are deleted.
@InputRequirement(Requirement.INPUT_REQUIRED)
@Tags({"sftp", "get", "retrieve", "files", "fetch", "remote", "ingest", "source", "input"})
@@ -83,48 +84,52 @@ import org.apache.nifi.processors.standard.util.SFTPTransfer;
)
public class FetchSFTP extends FetchFileTransfer {

private static final PropertyDescriptor PORT =
new PropertyDescriptor.Builder().fromPropertyDescriptor(UNDEFAULTED_PORT).defaultValue("22").build();

private static final PropertyDescriptor DISABLE_DIRECTORY_LISTING = new PropertyDescriptor.Builder()
.fromPropertyDescriptor(SFTPTransfer.DISABLE_DIRECTORY_LISTING)
.description(String.format("Control how '%s' is created when '%s' is '%s' and '%s' is enabled. %s",
MOVE_DESTINATION_DIR.getDisplayName(),
COMPLETION_STRATEGY.getDisplayName(),
COMPLETION_MOVE.getDisplayName(),
MOVE_CREATE_DIRECTORY.getDisplayName(),
SFTPTransfer.DISABLE_DIRECTORY_LISTING.getDescription())).build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(
HOSTNAME,
PORT,
USERNAME,
SFTPTransfer.PASSWORD,
SFTPTransfer.PRIVATE_KEY_PATH,
SFTPTransfer.PRIVATE_KEY_PASSPHRASE,
REMOTE_FILENAME,
COMPLETION_STRATEGY,
MOVE_DESTINATION_DIR,
MOVE_CREATE_DIRECTORY,
DISABLE_DIRECTORY_LISTING,
SFTPTransfer.CONNECTION_TIMEOUT,
SFTPTransfer.DATA_TIMEOUT,
SFTPTransfer.USE_KEEPALIVE_ON_TIMEOUT,
SFTPTransfer.HOST_KEY_FILE,
SFTPTransfer.STRICT_HOST_KEY_CHECKING,
SFTPTransfer.USE_COMPRESSION,
SFTPTransfer.PROXY_CONFIGURATION_SERVICE,
FTPTransfer.PROXY_TYPE,
FTPTransfer.PROXY_HOST,
FTPTransfer.PROXY_PORT,
FTPTransfer.HTTP_PROXY_USERNAME,
FTPTransfer.HTTP_PROXY_PASSWORD,
FILE_NOT_FOUND_LOG_LEVEL,
SFTPTransfer.CIPHERS_ALLOWED,
SFTPTransfer.KEY_ALGORITHMS_ALLOWED,
SFTPTransfer.KEY_EXCHANGE_ALGORITHMS_ALLOWED,
SFTPTransfer.MESSAGE_AUTHENTICATION_CODES_ALLOWED
);

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
final PropertyDescriptor port = new PropertyDescriptor.Builder().fromPropertyDescriptor(UNDEFAULTED_PORT).defaultValue("22").build();
final PropertyDescriptor disableDirectoryListing = new PropertyDescriptor.Builder()
.fromPropertyDescriptor(SFTPTransfer.DISABLE_DIRECTORY_LISTING)
.description(String.format("Control how '%s' is created when '%s' is '%s' and '%s' is enabled. %s",
MOVE_DESTINATION_DIR.getDisplayName(),
COMPLETION_STRATEGY.getDisplayName(),
COMPLETION_MOVE.getDisplayName(),
MOVE_CREATE_DIRECTORY.getDisplayName(),
SFTPTransfer.DISABLE_DIRECTORY_LISTING.getDescription())).build();

final List<PropertyDescriptor> properties = new ArrayList<>();
properties.add(HOSTNAME);
properties.add(port);
properties.add(USERNAME);
properties.add(SFTPTransfer.PASSWORD);
properties.add(SFTPTransfer.PRIVATE_KEY_PATH);
properties.add(SFTPTransfer.PRIVATE_KEY_PASSPHRASE);
properties.add(REMOTE_FILENAME);
properties.add(COMPLETION_STRATEGY);
properties.add(MOVE_DESTINATION_DIR);
properties.add(MOVE_CREATE_DIRECTORY);
properties.add(disableDirectoryListing);
properties.add(SFTPTransfer.CONNECTION_TIMEOUT);
properties.add(SFTPTransfer.DATA_TIMEOUT);
properties.add(SFTPTransfer.USE_KEEPALIVE_ON_TIMEOUT);
properties.add(SFTPTransfer.HOST_KEY_FILE);
properties.add(SFTPTransfer.STRICT_HOST_KEY_CHECKING);
properties.add(SFTPTransfer.USE_COMPRESSION);
properties.add(SFTPTransfer.PROXY_CONFIGURATION_SERVICE);
properties.add(FTPTransfer.PROXY_TYPE);
properties.add(FTPTransfer.PROXY_HOST);
properties.add(FTPTransfer.PROXY_PORT);
properties.add(FTPTransfer.HTTP_PROXY_USERNAME);
properties.add(FTPTransfer.HTTP_PROXY_PASSWORD);
properties.add(FILE_NOT_FOUND_LOG_LEVEL);
properties.add(SFTPTransfer.CIPHERS_ALLOWED);
properties.add(SFTPTransfer.KEY_ALGORITHMS_ALLOWED);
properties.add(SFTPTransfer.KEY_EXCHANGE_ALGORITHMS_ALLOWED);
properties.add(SFTPTransfer.MESSAGE_AUTHENTICATION_CODES_ALLOWED);
return properties;
return PROPERTIES;
}

@Override

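Both fetch processors derive their PORT descriptor from the shared UNDEFAULTED_PORT via fromPropertyDescriptor, overriding only the default value ("21" for FTP, "22" for SFTP); the commit simply hoists that derivation into a static constant. The toy builder below illustrates the copy-and-override mechanics; the Descriptor type and its methods are hypothetical stand-ins, not NiFi API:

import java.util.Objects;

// A tiny value-plus-builder type standing in for NiFi's PropertyDescriptor.
final class Descriptor {
    final String name;
    final String defaultValue;

    private Descriptor(String name, String defaultValue) {
        this.name = Objects.requireNonNull(name);
        this.defaultValue = defaultValue;
    }

    static Builder builder() {
        return new Builder();
    }

    static final class Builder {
        private String name;
        private String defaultValue;

        // Mirrors the idea of fromPropertyDescriptor: seed the builder
        // with an existing descriptor, then override selected fields
        Builder from(Descriptor other) {
            this.name = other.name;
            this.defaultValue = other.defaultValue;
            return this;
        }

        Builder name(String name) {
            this.name = name;
            return this;
        }

        Builder defaultValue(String defaultValue) {
            this.defaultValue = defaultValue;
            return this;
        }

        Descriptor build() {
            return new Descriptor(name, defaultValue);
        }
    }
}

class DerivedDescriptorDemo {
    static final Descriptor UNDEFAULTED_PORT = Descriptor.builder().name("Port").build();
    // FTP and SFTP variants share everything except the default port
    static final Descriptor FTP_PORT = Descriptor.builder().from(UNDEFAULTED_PORT).defaultValue("21").build();
    static final Descriptor SFTP_PORT = Descriptor.builder().from(UNDEFAULTED_PORT).defaultValue("22").build();

    public static void main(String[] args) {
        System.out.println(FTP_PORT.name + " -> " + FTP_PORT.defaultValue);   // Port -> 21
        System.out.println(SFTP_PORT.name + " -> " + SFTP_PORT.defaultValue); // Port -> 22
    }
}
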
@@ -71,7 +71,7 @@ public class FilterAttribute extends AbstractProcessor {
.description("All successful FlowFiles are routed to this relationship")
.build();

private final static Set<Relationship> relationships = Set.of(REL_SUCCESS);
private final static Set<Relationship> RELATIONSHIPS = Set.of(REL_SUCCESS);

public static final PropertyDescriptor FILTER_MODE = new PropertyDescriptor.Builder()
.name("Filter Mode")
@@ -113,21 +113,25 @@ public class FilterAttribute extends AbstractProcessor {
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
.build();

private final static String DELIMITER_VALUE = ",";
private final static List<PropertyDescriptor> PROPERTIES = List.of(
FILTER_MODE,
MATCHING_STRATEGY,
ATTRIBUTE_ENUMERATION,
ATTRIBUTE_PATTERN
);

private final static List<PropertyDescriptor> properties =
List.of(FILTER_MODE, MATCHING_STRATEGY, ATTRIBUTE_ENUMERATION, ATTRIBUTE_PATTERN);
private final static String DELIMITER_VALUE = ",";

private volatile Predicate<String> cachedMatchingPredicate;

@Override
public Set<Relationship> getRelationships() {
return relationships;
return RELATIONSHIPS;
}

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return properties;
return PROPERTIES;
}

@@ -24,12 +24,12 @@ import com.github.wnameless.json.flattener.PrintMode;
import com.github.wnameless.json.unflattener.JsonUnflattener;
import org.apache.commons.io.IOUtils;
import org.apache.commons.text.StringEscapeUtils;
import org.apache.nifi.annotation.behavior.SideEffectFree;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
import org.apache.nifi.annotation.behavior.SideEffectFree;
import org.apache.nifi.annotation.behavior.SupportsBatching;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.components.AllowableValue;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.ValidationResult;
@@ -39,16 +39,12 @@ import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;

import java.io.IOException;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -162,6 +158,15 @@ public class FlattenJson extends AbstractProcessor {
.expressionLanguageSupported(ExpressionLanguageScope.NONE)
.build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(
SEPARATOR,
FLATTEN_MODE,
IGNORE_RESERVED_CHARACTERS,
RETURN_TYPE,
CHARACTER_SET,
PRETTY_PRINT
);

public static final Relationship REL_SUCCESS = new Relationship.Builder()
.description("Successfully flattened/unflattened files go to this relationship.")
.name("success")
@@ -172,35 +177,19 @@ public class FlattenJson extends AbstractProcessor {
.name("failure")
.build();

private List<PropertyDescriptor> properties;
private Set<Relationship> relationships;

@Override
protected void init(final ProcessorInitializationContext context) {
List<PropertyDescriptor> props = new ArrayList<>();
props.add(SEPARATOR);
props.add(FLATTEN_MODE);
props.add(IGNORE_RESERVED_CHARACTERS);
props.add(RETURN_TYPE);
props.add(CHARACTER_SET);
props.add(PRETTY_PRINT);
properties = Collections.unmodifiableList(props);

Set<Relationship> rels = new HashSet<>();
rels.add(REL_SUCCESS);
rels.add(REL_FAILURE);

relationships = Collections.unmodifiableSet(rels);
}
private static final Set<Relationship> RELATIONSHIPS = Set.of(
REL_SUCCESS,
REL_FAILURE
);

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return properties;
return PROPERTIES;
}

@Override
public Set<Relationship> getRelationships() {
return relationships;
return RELATIONSHIPS;
}

@Override

@@ -34,10 +34,7 @@ import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processors.standard.enrichment.EnrichmentRole;

import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
@@ -68,14 +65,14 @@ public class ForkEnrichment extends AbstractProcessor {
.description("A clone of the incoming FlowFile will be routed to this relationship, after adding appropriate attributes.")
.build();

private static final Set<Relationship> relationships = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(
REL_ORIGINAL,
REL_ENRICHMENT
)));
private static final Set<Relationship> RELATIONSHIPS = Set.of(
REL_ORIGINAL,
REL_ENRICHMENT
);

@Override
public Set<Relationship> getRelationships() {
return relationships;
return RELATIONSHIPS;
}

@Override

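One behavioral caveat when replacing Collections.unmodifiableSet(new HashSet<>(Arrays.asList(...))) with Set.of(...), as ForkEnrichment does here: the factory rejects duplicate and null elements outright instead of silently absorbing them. That is safe for distinct relationship constants, but worth knowing before applying the same mechanical rewrite elsewhere. A quick illustration:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

class SetOfStrictnessDemo {
    public static void main(String[] args) {
        // The old idiom silently collapses duplicates
        Set<String> lenient = new HashSet<>(Arrays.asList("original", "original"));
        System.out.println(lenient.size()); // 1

        // Set.of treats a duplicate as a programming error
        try {
            Set.of("original", "original");
        } catch (IllegalArgumentException e) {
            System.out.println("duplicate element rejected");
        }

        // ...and null elements are rejected as well
        try {
            Set.of("original", null);
        } catch (NullPointerException e) {
            System.out.println("null element rejected");
        }
    }
}
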
@@ -66,7 +66,6 @@ import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@@ -142,6 +141,13 @@ public class ForkRecord extends AbstractProcessor {
.required(true)
.build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(
RECORD_READER,
RECORD_WRITER,
MODE,
INCLUDE_PARENT_FIELDS
);

public static final Relationship REL_FORK = new Relationship.Builder()
.name("fork")
.description("The FlowFiles containing the forked records will be routed to this relationship")
@@ -155,23 +161,20 @@ public class ForkRecord extends AbstractProcessor {
.description("In case a FlowFile generates an error during the fork operation, it will be routed to this relationship")
.build();

private static final Set<Relationship> RELATIONSHIPS = Set.of(
REL_ORIGINAL,
REL_FAILURE,
REL_FORK
);

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
final List<PropertyDescriptor> properties = new ArrayList<>();
properties.add(RECORD_READER);
properties.add(RECORD_WRITER);
properties.add(MODE);
properties.add(INCLUDE_PARENT_FIELDS);
return properties;
return PROPERTIES;
}

@Override
public Set<Relationship> getRelationships() {
final Set<Relationship> relationships = new HashSet<>();
relationships.add(REL_ORIGINAL);
relationships.add(REL_FAILURE);
relationships.add(REL_FORK);
return relationships;
return RELATIONSHIPS;
}

@Override

@@ -16,20 +16,6 @@
*/
package org.apache.nifi.processors.standard;

import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;

import org.apache.nifi.annotation.behavior.DynamicProperty;
import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
@@ -51,12 +37,23 @@ import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.DataUnit;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.io.OutputStreamCallback;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.scheduling.SchedulingStrategy;

import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;

@SupportsBatching
@Tags({"test", "random", "generate", "load"})
@InputRequirement(Requirement.INPUT_FORBIDDEN)
@@ -132,35 +129,27 @@ public class GenerateFlowFile extends AbstractProcessor {
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(
FILE_SIZE,
BATCH_SIZE,
DATA_FORMAT,
UNIQUE_FLOWFILES,
CUSTOM_TEXT,
CHARSET,
MIME_TYPE
);

public static final Relationship SUCCESS = new Relationship.Builder()
.name("success")
.build();

private List<PropertyDescriptor> descriptors;
private Set<Relationship> relationships;
private static final Set<Relationship> RELATIONSHIPS = Set.of(SUCCESS);

private static final char[] TEXT_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890!@#$%^&*()-_=+/?.,';:\"?<>\n\t ".toCharArray();

@Override
protected void init(final ProcessorInitializationContext context) {
final List<PropertyDescriptor> descriptors = new ArrayList<>();
descriptors.add(FILE_SIZE);
descriptors.add(BATCH_SIZE);
descriptors.add(DATA_FORMAT);
descriptors.add(UNIQUE_FLOWFILES);
descriptors.add(CUSTOM_TEXT);
descriptors.add(CHARSET);
descriptors.add(MIME_TYPE);
this.descriptors = Collections.unmodifiableList(descriptors);

final Set<Relationship> relationships = new HashSet<>();
relationships.add(SUCCESS);
this.relationships = Collections.unmodifiableSet(relationships);
}

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return descriptors;
return PROPERTIES;
}

@Override
@@ -177,7 +166,7 @@ public class GenerateFlowFile extends AbstractProcessor {

@Override
public Set<Relationship> getRelationships() {
return relationships;
return RELATIONSHIPS;
}

@OnScheduled

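GenerateFlowFile also drops its init(ProcessorInitializationContext) override entirely: the collections it used to build once per processor instance are now static finals, initialized exactly once when the class is loaded, no matter how many instances the framework creates. A small runnable sketch demonstrating that timing difference (all names here are illustrative only):

import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

class StaticInitDemo {
    static final AtomicInteger BUILDS = new AtomicInteger();

    // Runs exactly once, during class initialization. The old
    // init(context) hook ran once per processor instance instead.
    private static final List<String> DESCRIPTORS = buildDescriptors();

    private static List<String> buildDescriptors() {
        BUILDS.incrementAndGet();
        return List.of("File Size", "Batch Size", "Data Format");
    }

    List<String> getSupportedPropertyDescriptors() {
        return DESCRIPTORS;
    }

    public static void main(String[] args) {
        new StaticInitDemo().getSupportedPropertyDescriptors();
        new StaticInitDemo().getSupportedPropertyDescriptors();
        System.out.println(BUILDS.get()); // 1: two instances, one build
    }
}
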
@@ -65,7 +65,6 @@ import java.time.LocalDate;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
@@ -153,25 +152,26 @@ public class GenerateRecord extends AbstractProcessor {
.required(false)
.build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(
RECORD_WRITER,
NUM_RECORDS,
NULLABLE_FIELDS,
NULL_PERCENTAGE,
SCHEMA_TEXT
);

static final Relationship REL_SUCCESS = new Relationship.Builder()
.name("success")
.description("FlowFiles that are successfully created will be routed to this relationship")
.build();

static final Set<Relationship> RELATIONSHIPS = Collections.singleton(REL_SUCCESS);
static final Set<Relationship> RELATIONSHIPS = Set.of(REL_SUCCESS);

private volatile Faker faker = new Faker();

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
final List<PropertyDescriptor> properties = new ArrayList<>();
properties.add(RECORD_WRITER);
properties.add(NUM_RECORDS);
properties.add(NULLABLE_FIELDS);
properties.add(NULL_PERCENTAGE);
properties.add(SCHEMA_TEXT);
return properties;
return PROPERTIES;
}

@Override

@@ -60,7 +60,6 @@ import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -165,31 +164,34 @@ public class GenerateTableFetch extends AbstractDatabaseFetchProcessor {
.addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
.build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(
DBCP_SERVICE,
DB_TYPE,
TABLE_NAME,
COLUMN_NAMES,
MAX_VALUE_COLUMN_NAMES,
QUERY_TIMEOUT,
PARTITION_SIZE,
COLUMN_FOR_VALUE_PARTITIONING,
WHERE_CLAUSE,
CUSTOM_ORDERBY_COLUMN,
OUTPUT_EMPTY_FLOWFILE_ON_ZERO_RESULTS
);

public static final Relationship REL_FAILURE = new Relationship.Builder()
.name("failure")
.description("This relationship is only used when SQL query execution (using an incoming FlowFile) failed. The incoming FlowFile will be penalized and routed to this relationship. "
+ "If no incoming connection(s) are specified, this relationship is unused.")
.build();

public GenerateTableFetch() {
final Set<Relationship> r = new HashSet<>();
r.add(REL_SUCCESS);
r.add(REL_FAILURE);
relationships = Collections.unmodifiableSet(r);
private static final Set<Relationship> RELATIONSHIPS = Set.of(
REL_SUCCESS,
REL_FAILURE
);

final List<PropertyDescriptor> pds = new ArrayList<>();
pds.add(DBCP_SERVICE);
pds.add(DB_TYPE);
pds.add(TABLE_NAME);
pds.add(COLUMN_NAMES);
pds.add(MAX_VALUE_COLUMN_NAMES);
pds.add(QUERY_TIMEOUT);
pds.add(PARTITION_SIZE);
pds.add(COLUMN_FOR_VALUE_PARTITIONING);
pds.add(WHERE_CLAUSE);
pds.add(CUSTOM_ORDERBY_COLUMN);
pds.add(OUTPUT_EMPTY_FLOWFILE_ON_ZERO_RESULTS);
propDescriptors = Collections.unmodifiableList(pds);
public GenerateTableFetch() {
propDescriptors = PROPERTIES;
relationships = RELATIONSHIPS;
}

@Override

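GenerateTableFetch keeps its constructor where most of the other processors could delete theirs, because its base class (AbstractDatabaseFetchProcessor, shown in the opening hunks of this commit) exposes propDescriptors and relationships as inherited instance fields; the constructor now merely points them at the shared immutable constants. A minimal sketch of that arrangement, with a hypothetical base class standing in:

import java.util.List;
import java.util.Set;

// Hypothetical stand-in for a base class that exposes protected
// per-instance fields, as AbstractDatabaseFetchProcessor does.
abstract class AbstractFetch {
    protected List<String> propDescriptors;
    protected Set<String> relationships;
}

class TableFetchDemo extends AbstractFetch {
    private static final List<String> PROPERTIES = List.of("Database Connection Pooling Service", "Table Name");
    private static final Set<String> RELATIONSHIPS = Set.of("success", "failure");

    TableFetchDemo() {
        // Every instance shares the same immutable collections;
        // only the field assignment remains per instance
        propDescriptors = PROPERTIES;
        relationships = RELATIONSHIPS;
    }

    public static void main(String[] args) {
        TableFetchDemo a = new TableFetchDemo();
        TableFetchDemo b = new TableFetchDemo();
        System.out.println(a.propDescriptors == b.propDescriptors); // true
    }
}
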
@@ -16,11 +16,6 @@
*/
package org.apache.nifi.processors.standard;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
import org.apache.nifi.annotation.behavior.WritesAttribute;
@@ -32,10 +27,13 @@ import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.ValidationContext;
import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.util.file.transfer.FileTransfer;
import org.apache.nifi.processor.util.file.transfer.GetFileTransfer;
import org.apache.nifi.processors.standard.util.FTPTransfer;
import org.apache.nifi.processor.util.file.transfer.FileTransfer;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

@InputRequirement(Requirement.INPUT_FORBIDDEN)
@Tags({"FTP", "get", "retrieve", "files", "fetch", "remote", "ingest", "source", "input"})
@@ -57,44 +55,39 @@ import org.apache.nifi.processor.util.file.transfer.FileTransfer;
@SeeAlso(PutFTP.class)
public class GetFTP extends GetFileTransfer {

private List<PropertyDescriptor> properties;

@Override
protected void init(final ProcessorInitializationContext context) {
final List<PropertyDescriptor> properties = new ArrayList<>();
properties.add(FTPTransfer.HOSTNAME);
properties.add(FTPTransfer.PORT);
properties.add(FTPTransfer.USERNAME);
properties.add(FTPTransfer.PASSWORD);
properties.add(FTPTransfer.CONNECTION_MODE);
properties.add(FTPTransfer.TRANSFER_MODE);
properties.add(FTPTransfer.REMOTE_PATH);
properties.add(FTPTransfer.FILE_FILTER_REGEX);
properties.add(FTPTransfer.PATH_FILTER_REGEX);
properties.add(FTPTransfer.POLLING_INTERVAL);
properties.add(FTPTransfer.RECURSIVE_SEARCH);
properties.add(FTPTransfer.FOLLOW_SYMLINK);
properties.add(FTPTransfer.IGNORE_DOTTED_FILES);
properties.add(FTPTransfer.DELETE_ORIGINAL);
properties.add(FTPTransfer.CONNECTION_TIMEOUT);
properties.add(FTPTransfer.DATA_TIMEOUT);
properties.add(FTPTransfer.MAX_SELECTS);
properties.add(FTPTransfer.REMOTE_POLL_BATCH_SIZE);
properties.add(FTPTransfer.USE_NATURAL_ORDERING);
properties.add(FTPTransfer.PROXY_CONFIGURATION_SERVICE);
properties.add(FTPTransfer.PROXY_TYPE);
properties.add(FTPTransfer.PROXY_HOST);
properties.add(FTPTransfer.PROXY_PORT);
properties.add(FTPTransfer.HTTP_PROXY_USERNAME);
properties.add(FTPTransfer.HTTP_PROXY_PASSWORD);
properties.add(FTPTransfer.BUFFER_SIZE);
properties.add(FTPTransfer.UTF8_ENCODING);
this.properties = Collections.unmodifiableList(properties);
}
private static final List<PropertyDescriptor> PROPERTIES = List.of(
FTPTransfer.HOSTNAME,
FTPTransfer.PORT,
FTPTransfer.USERNAME,
FTPTransfer.PASSWORD,
FTPTransfer.CONNECTION_MODE,
FTPTransfer.TRANSFER_MODE,
FTPTransfer.REMOTE_PATH,
FTPTransfer.FILE_FILTER_REGEX,
FTPTransfer.PATH_FILTER_REGEX,
FTPTransfer.POLLING_INTERVAL,
FTPTransfer.RECURSIVE_SEARCH,
FTPTransfer.FOLLOW_SYMLINK,
FTPTransfer.IGNORE_DOTTED_FILES,
FTPTransfer.DELETE_ORIGINAL,
FTPTransfer.CONNECTION_TIMEOUT,
FTPTransfer.DATA_TIMEOUT,
FTPTransfer.MAX_SELECTS,
FTPTransfer.REMOTE_POLL_BATCH_SIZE,
FTPTransfer.USE_NATURAL_ORDERING,
FTPTransfer.PROXY_CONFIGURATION_SERVICE,
FTPTransfer.PROXY_TYPE,
FTPTransfer.PROXY_HOST,
FTPTransfer.PROXY_PORT,
FTPTransfer.HTTP_PROXY_USERNAME,
FTPTransfer.HTTP_PROXY_PASSWORD,
FTPTransfer.BUFFER_SIZE,
FTPTransfer.UTF8_ENCODING
);

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return properties;
return PROPERTIES;
}

@Override

@@ -37,7 +37,6 @@ import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.DataUnit;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;
@@ -56,7 +55,6 @@ import java.nio.file.attribute.PosixFilePermissions;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
@@ -190,6 +188,21 @@ public class GetFile extends AbstractProcessor {
.defaultValue("10")
.build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(
DIRECTORY,
FILE_FILTER,
PATH_FILTER,
BATCH_SIZE,
KEEP_SOURCE_FILE,
RECURSE,
POLLING_INTERVAL,
IGNORE_HIDDEN_FILES,
MIN_AGE,
MAX_AGE,
MIN_SIZE,
MAX_SIZE
);

public static final String FILE_CREATION_TIME_ATTRIBUTE = "file.creationTime";
public static final String FILE_LAST_MODIFY_TIME_ATTRIBUTE = "file.lastModifiedTime";
public static final String FILE_LAST_ACCESS_TIME_ATTRIBUTE = "file.lastAccessTime";
@@ -200,8 +213,7 @@ public class GetFile extends AbstractProcessor {

public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success").description("All files are routed to success").build();

private List<PropertyDescriptor> properties;
private Set<Relationship> relationships;
private static final Set<Relationship> RELATIONSHIPS = Set.of(REL_SUCCESS);

private final BlockingQueue<File> fileQueue = new LinkedBlockingQueue<>();
private final Set<File> inProcess = new HashSet<>(); // guarded by queueLock
@@ -212,36 +224,14 @@ public class GetFile extends AbstractProcessor {

private final AtomicLong queueLastUpdated = new AtomicLong(0L);

@Override
protected void init(final ProcessorInitializationContext context) {
final List<PropertyDescriptor> properties = new ArrayList<>();
properties.add(DIRECTORY);
properties.add(FILE_FILTER);
properties.add(PATH_FILTER);
properties.add(BATCH_SIZE);
properties.add(KEEP_SOURCE_FILE);
properties.add(RECURSE);
properties.add(POLLING_INTERVAL);
properties.add(IGNORE_HIDDEN_FILES);
properties.add(MIN_AGE);
properties.add(MAX_AGE);
properties.add(MIN_SIZE);
properties.add(MAX_SIZE);
this.properties = Collections.unmodifiableList(properties);

final Set<Relationship> relationships = new HashSet<>();
relationships.add(REL_SUCCESS);
this.relationships = Collections.unmodifiableSet(relationships);
}

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return properties;
return PROPERTIES;
}

@Override
public Set<Relationship> getRelationships() {
return relationships;
return RELATIONSHIPS;
}

@OnScheduled
@@ -446,7 +436,7 @@ public class GetFile extends AbstractProcessor {
flowFile = session.putAttribute(flowFile, CoreAttributes.PATH.key(), relativePathString);
flowFile = session.putAttribute(flowFile, CoreAttributes.ABSOLUTE_PATH.key(), absPathString);
Map<String, String> attributes = getAttributesFromFile(filePath);
if (attributes.size() > 0) {
if (!attributes.isEmpty()) {
flowFile = session.putAllAttributes(flowFile, attributes);
}

@@ -16,11 +16,6 @@
*/
package org.apache.nifi.processors.standard;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
import org.apache.nifi.annotation.behavior.WritesAttribute;
@@ -32,12 +27,15 @@ import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.ValidationContext;
import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.util.file.transfer.FileTransfer;
import org.apache.nifi.processor.util.file.transfer.GetFileTransfer;
import org.apache.nifi.processors.standard.util.FTPTransfer;
import org.apache.nifi.processor.util.file.transfer.FileTransfer;
import org.apache.nifi.processors.standard.util.SFTPTransfer;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

@InputRequirement(Requirement.INPUT_FORBIDDEN)
@Tags({"sftp", "get", "retrieve", "files", "fetch", "remote", "ingest", "source", "input"})
@CapabilityDescription("Fetches files from an SFTP Server and creates FlowFiles from them")
@@ -56,50 +54,45 @@ import org.apache.nifi.processors.standard.util.SFTPTransfer;
@SeeAlso(PutSFTP.class)
public class GetSFTP extends GetFileTransfer {

private List<PropertyDescriptor> properties;

@Override
protected void init(final ProcessorInitializationContext context) {
final List<PropertyDescriptor> properties = new ArrayList<>();
properties.add(SFTPTransfer.HOSTNAME);
properties.add(SFTPTransfer.PORT);
properties.add(SFTPTransfer.USERNAME);
properties.add(SFTPTransfer.PASSWORD);
properties.add(SFTPTransfer.PRIVATE_KEY_PATH);
properties.add(SFTPTransfer.PRIVATE_KEY_PASSPHRASE);
properties.add(SFTPTransfer.REMOTE_PATH);
properties.add(SFTPTransfer.FILE_FILTER_REGEX);
properties.add(SFTPTransfer.PATH_FILTER_REGEX);
properties.add(SFTPTransfer.POLLING_INTERVAL);
properties.add(SFTPTransfer.RECURSIVE_SEARCH);
properties.add(SFTPTransfer.FOLLOW_SYMLINK);
properties.add(SFTPTransfer.IGNORE_DOTTED_FILES);
properties.add(SFTPTransfer.DELETE_ORIGINAL);
properties.add(SFTPTransfer.CONNECTION_TIMEOUT);
properties.add(SFTPTransfer.DATA_TIMEOUT);
properties.add(SFTPTransfer.HOST_KEY_FILE);
properties.add(SFTPTransfer.MAX_SELECTS);
properties.add(SFTPTransfer.REMOTE_POLL_BATCH_SIZE);
properties.add(SFTPTransfer.STRICT_HOST_KEY_CHECKING);
properties.add(SFTPTransfer.USE_KEEPALIVE_ON_TIMEOUT);
properties.add(SFTPTransfer.USE_COMPRESSION);
properties.add(SFTPTransfer.USE_NATURAL_ORDERING);
properties.add(SFTPTransfer.PROXY_CONFIGURATION_SERVICE);
properties.add(FTPTransfer.PROXY_TYPE);
properties.add(FTPTransfer.PROXY_HOST);
properties.add(FTPTransfer.PROXY_PORT);
properties.add(FTPTransfer.HTTP_PROXY_USERNAME);
properties.add(FTPTransfer.HTTP_PROXY_PASSWORD);
properties.add(SFTPTransfer.CIPHERS_ALLOWED);
properties.add(SFTPTransfer.KEY_ALGORITHMS_ALLOWED);
properties.add(SFTPTransfer.KEY_EXCHANGE_ALGORITHMS_ALLOWED);
properties.add(SFTPTransfer.MESSAGE_AUTHENTICATION_CODES_ALLOWED);
this.properties = Collections.unmodifiableList(properties);
}
private static final List<PropertyDescriptor> PROPERTIES = List.of(
SFTPTransfer.HOSTNAME,
SFTPTransfer.PORT,
SFTPTransfer.USERNAME,
SFTPTransfer.PASSWORD,
SFTPTransfer.PRIVATE_KEY_PATH,
SFTPTransfer.PRIVATE_KEY_PASSPHRASE,
SFTPTransfer.REMOTE_PATH,
SFTPTransfer.FILE_FILTER_REGEX,
SFTPTransfer.PATH_FILTER_REGEX,
SFTPTransfer.POLLING_INTERVAL,
SFTPTransfer.RECURSIVE_SEARCH,
SFTPTransfer.FOLLOW_SYMLINK,
SFTPTransfer.IGNORE_DOTTED_FILES,
SFTPTransfer.DELETE_ORIGINAL,
SFTPTransfer.CONNECTION_TIMEOUT,
SFTPTransfer.DATA_TIMEOUT,
SFTPTransfer.HOST_KEY_FILE,
SFTPTransfer.MAX_SELECTS,
SFTPTransfer.REMOTE_POLL_BATCH_SIZE,
SFTPTransfer.STRICT_HOST_KEY_CHECKING,
SFTPTransfer.USE_KEEPALIVE_ON_TIMEOUT,
SFTPTransfer.USE_COMPRESSION,
SFTPTransfer.USE_NATURAL_ORDERING,
SFTPTransfer.PROXY_CONFIGURATION_SERVICE,
FTPTransfer.PROXY_TYPE,
FTPTransfer.PROXY_HOST,
FTPTransfer.PROXY_PORT,
FTPTransfer.HTTP_PROXY_USERNAME,
FTPTransfer.HTTP_PROXY_PASSWORD,
SFTPTransfer.CIPHERS_ALLOWED,
SFTPTransfer.KEY_ALGORITHMS_ALLOWED,
SFTPTransfer.KEY_EXCHANGE_ALGORITHMS_ALLOWED,
SFTPTransfer.MESSAGE_AUTHENTICATION_CODES_ALLOWED
);

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return properties;
return PROPERTIES;
}

@Override

@@ -16,7 +16,15 @@
*/
package org.apache.nifi.processors.standard;

import jakarta.servlet.AsyncContext;
import jakarta.servlet.DispatcherType;
import jakarta.servlet.MultipartConfigElement;
import jakarta.servlet.ServletException;
import jakarta.servlet.http.Cookie;
import jakarta.servlet.http.HttpServlet;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import jakarta.servlet.http.Part;
import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
@@ -59,14 +67,6 @@ import org.eclipse.jetty.server.ServerConnector;
import org.eclipse.jetty.util.thread.QueuedThreadPool;

import javax.net.ssl.SSLContext;
import jakarta.servlet.AsyncContext;
import jakarta.servlet.DispatcherType;
import jakarta.servlet.MultipartConfigElement;
import jakarta.servlet.ServletException;
import jakarta.servlet.http.Cookie;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import jakarta.servlet.http.Part;
import java.io.IOException;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
@@ -74,7 +74,6 @@ import java.net.URI;
import java.net.URLDecoder;
import java.security.Principal;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
@@ -90,10 +89,10 @@ import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.regex.Pattern;

import static jakarta.servlet.http.HttpServletResponse.SC_SERVICE_UNAVAILABLE;
import static jakarta.servlet.http.HttpServletResponse.SC_BAD_REQUEST;
import static jakarta.servlet.http.HttpServletResponse.SC_METHOD_NOT_ALLOWED;
import static jakarta.servlet.http.HttpServletResponse.SC_NOT_FOUND;
import static jakarta.servlet.http.HttpServletResponse.SC_BAD_REQUEST;
import static jakarta.servlet.http.HttpServletResponse.SC_SERVICE_UNAVAILABLE;

@InputRequirement(Requirement.INPUT_FORBIDDEN)
@Tags({"http", "https", "request", "listen", "ingress", "web service"})
@@ -197,7 +196,7 @@ public class HandleHttpRequest extends AbstractProcessor {
.description("HTTP Protocols supported for Application Layer Protocol Negotiation with TLS")
.required(true)
.allowableValues(HttpProtocolStrategy.class)
.defaultValue(HttpProtocolStrategy.HTTP_1_1.getValue())
.defaultValue(HttpProtocolStrategy.HTTP_1_1)
.dependsOn(SSL_CONTEXT)
.build();
public static final PropertyDescriptor URL_CHARACTER_SET = new PropertyDescriptor.Builder()
@@ -312,37 +311,36 @@ public class HandleHttpRequest extends AbstractProcessor {
.addValidator(StandardValidators.DATA_SIZE_VALIDATOR)
.defaultValue("512 KB")
.build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(
PORT,
HOSTNAME,
SSL_CONTEXT,
HTTP_PROTOCOL_STRATEGY,
HTTP_CONTEXT_MAP,
PATH_REGEX,
URL_CHARACTER_SET,
ALLOW_GET,
ALLOW_POST,
ALLOW_PUT,
ALLOW_DELETE,
ALLOW_HEAD,
ALLOW_OPTIONS,
MAXIMUM_THREADS,
ADDITIONAL_METHODS,
CLIENT_AUTH,
CONTAINER_QUEUE_SIZE,
MULTIPART_REQUEST_MAX_SIZE,
MULTIPART_READ_BUFFER_SIZE,
PARAMETERS_TO_ATTRIBUTES
);

public static final Relationship REL_SUCCESS = new Relationship.Builder()
.name("success")
.description("All content that is received is routed to the 'success' relationship")
.build();

private static final List<PropertyDescriptor> propertyDescriptors;

static {
List<PropertyDescriptor> descriptors = new ArrayList<>();
descriptors.add(PORT);
descriptors.add(HOSTNAME);
descriptors.add(SSL_CONTEXT);
descriptors.add(HTTP_PROTOCOL_STRATEGY);
descriptors.add(HTTP_CONTEXT_MAP);
descriptors.add(PATH_REGEX);
descriptors.add(URL_CHARACTER_SET);
descriptors.add(ALLOW_GET);
descriptors.add(ALLOW_POST);
descriptors.add(ALLOW_PUT);
descriptors.add(ALLOW_DELETE);
descriptors.add(ALLOW_HEAD);
descriptors.add(ALLOW_OPTIONS);
descriptors.add(MAXIMUM_THREADS);
descriptors.add(ADDITIONAL_METHODS);
descriptors.add(CLIENT_AUTH);
descriptors.add(CONTAINER_QUEUE_SIZE);
descriptors.add(MULTIPART_REQUEST_MAX_SIZE);
descriptors.add(MULTIPART_READ_BUFFER_SIZE);
descriptors.add(PARAMETERS_TO_ATTRIBUTES);
propertyDescriptors = Collections.unmodifiableList(descriptors);
}
private static final Set<Relationship> RELATIONSHIPS = Set.of(REL_SUCCESS);

private volatile Server server;
private volatile boolean ready;
@@ -354,12 +352,12 @@ public class HandleHttpRequest extends AbstractProcessor {

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return propertyDescriptors;
return PROPERTIES;
}

@Override
public Set<Relationship> getRelationships() {
return Collections.singleton(REL_SUCCESS);
return RELATIONSHIPS;
}

@OnScheduled
@@ -389,7 +387,7 @@ public class HandleHttpRequest extends AbstractProcessor {
serverConnectorFactory.setWantClientAuth(wantClientAuth);
final SSLContext sslContext = sslService == null ? null : sslService.createContext();
serverConnectorFactory.setSslContext(sslContext);
final HttpProtocolStrategy httpProtocolStrategy = HttpProtocolStrategy.valueOf(context.getProperty(HTTP_PROTOCOL_STRATEGY).getValue());
final HttpProtocolStrategy httpProtocolStrategy = context.getProperty(HTTP_PROTOCOL_STRATEGY).asAllowableValue(HttpProtocolStrategy.class);
serverConnectorFactory.setApplicationLayerProtocols(httpProtocolStrategy.getApplicationLayerProtocols());

final ServerConnector serverConnector = serverConnectorFactory.getServerConnector();
@@ -606,7 +604,7 @@ public class HandleHttpRequest extends AbstractProcessor {
request.setAttribute(ServletContextRequest.MULTIPART_CONFIG_ELEMENT, new MultipartConfigElement(tempDir, requestMaxSize, requestMaxSize, readBufferSize));
List<Part> parts = null;
try {
parts = Collections.unmodifiableList(new ArrayList<>(request.getParts()));
parts = List.copyOf(request.getParts());
int allPartsCount = parts.size();
final String contextIdentifier = UUID.randomUUID().toString();
for (int i = 0; i < allPartsCount; i++) {

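Two smaller swaps appear in HandleHttpRequest: List.copyOf (Java 10) collapses the copy-then-wrap idiom used for the multipart parts, and the protocol strategy property is read directly as its enum via asAllowableValue(HttpProtocolStrategy.class) rather than round-tripping through valueOf(getValue()). The copyOf half is easy to show standalone:

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

class CopyOfDemo {
    public static void main(String[] args) {
        Collection<String> parts = new ArrayList<>(List.of("header", "body"));

        // Old idiom: Collections.unmodifiableList(new ArrayList<>(parts))
        // New: one call, same result -- an unmodifiable snapshot
        List<String> snapshot = List.copyOf(parts);

        parts.add("trailer");
        System.out.println(snapshot); // [header, body] -- unaffected by later mutation
    }
}

Both forms take a defensive copy, so later mutation of the source collection cannot leak into the returned list; copyOf just says so in one call.
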
@@ -16,16 +16,7 @@
  */
 package org.apache.nifi.processors.standard;

-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.TimeUnit;
-import java.util.regex.Pattern;
-
 import jakarta.servlet.http.HttpServletResponse;
-
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.InputRequirement;
 import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;

@@ -47,6 +38,12 @@ import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.processors.standard.util.HTTPUtils;
 import org.apache.nifi.util.StopWatch;

+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+import java.util.regex.Pattern;
+
 @InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"http", "https", "response", "egress", "web service"})
 @CapabilityDescription("Sends an HTTP Response to the Requestor that generated a FlowFile. This Processor is designed to be used in conjunction with "

@@ -85,6 +82,12 @@ public class HandleHttpResponse extends AbstractProcessor {
             .required(false)
             .build();

+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
+            STATUS_CODE,
+            HTTP_CONTEXT_MAP,
+            ATTRIBUTES_AS_HEADERS_REGEX
+    );
+
     public static final Relationship REL_SUCCESS = new Relationship.Builder()
             .name("success")
             .description("FlowFiles will be routed to this Relationship after the response has been successfully sent to the requestor")

@@ -95,21 +98,16 @@ public class HandleHttpResponse extends AbstractProcessor {
                     + "for instance, if the connection times out or if NiFi is restarted before responding to the HTTP Request.")
             .build();

+    private static final Set<Relationship> RELATIONSHIPS = Set.of(REL_SUCCESS, REL_FAILURE);
+
     @Override
     public final List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-        final List<PropertyDescriptor> properties = new ArrayList<>();
-        properties.add(STATUS_CODE);
-        properties.add(HTTP_CONTEXT_MAP);
-        properties.add(ATTRIBUTES_AS_HEADERS_REGEX);
-        return properties;
+        return PROPERTIES;
     }

     @Override
     public Set<Relationship> getRelationships() {
-        final Set<Relationship> relationships = new HashSet<>();
-        relationships.add(REL_SUCCESS);
-        relationships.add(REL_FAILURE);
-        return relationships;
+        return RELATIONSHIPS;
     }

     @Override

@@ -217,7 +215,7 @@ public class HandleHttpResponse extends AbstractProcessor {
     }

     private static boolean isNumber(final String value) {
-        if (value.length() == 0) {
+        if (value.isEmpty()) {
             return false;
         }

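Most files in this commit repeat the shape shown above: property lists and relationship sets move out of per-call construction (or out of init(ProcessorInitializationContext)) into static final constants built with List.of and Set.of. A hypothetical minimal processor sketching the target shape — the class and property names here are illustrative, not from the commit:

    import java.util.List;
    import java.util.Set;

    import org.apache.nifi.components.PropertyDescriptor;
    import org.apache.nifi.processor.AbstractProcessor;
    import org.apache.nifi.processor.ProcessContext;
    import org.apache.nifi.processor.ProcessSession;
    import org.apache.nifi.processor.Relationship;

    public class ExampleProcessor extends AbstractProcessor {

        static final PropertyDescriptor EXAMPLE_PROPERTY = new PropertyDescriptor.Builder()
                .name("Example Property")
                .required(true)
                .build();

        static final Relationship REL_SUCCESS = new Relationship.Builder()
                .name("success")
                .build();

        // Built once at class initialization; both getters return the same immutable instance on every call.
        private static final List<PropertyDescriptor> PROPERTIES = List.of(EXAMPLE_PROPERTY);
        private static final Set<Relationship> RELATIONSHIPS = Set.of(REL_SUCCESS);

        @Override
        protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
            return PROPERTIES;
        }

        @Override
        public Set<Relationship> getRelationships() {
            return RELATIONSHIPS;
        }

        @Override
        public void onTrigger(final ProcessContext context, final ProcessSession session) {
            // processor logic
        }
    }
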
@@ -38,7 +38,6 @@ import org.apache.nifi.migration.PropertyConfiguration;
 import org.apache.nifi.processor.AbstractProcessor;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
-import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;

@@ -60,9 +59,7 @@ import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.Collection;
-import java.util.Collections;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Set;

@@ -134,13 +131,19 @@ public class IdentifyMimeType extends AbstractProcessor {
             .dependsOn(CONFIG_STRATEGY, REPLACE, MERGE)
             .build();

+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
+            USE_FILENAME_IN_DETECTION,
+            CONFIG_STRATEGY,
+            MIME_CONFIG_BODY,
+            MIME_CONFIG_FILE
+    );
+
     public static final Relationship REL_SUCCESS = new Relationship.Builder()
             .name("success")
             .description("All FlowFiles are routed to success")
             .build();

-    private Set<Relationship> relationships;
-    private List<PropertyDescriptor> properties;
+    private static final Set<Relationship> RELATIONSHIPS = Set.of(REL_SUCCESS);

     private final TikaConfig config;
     private Detector detector;

@@ -151,21 +154,6 @@ public class IdentifyMimeType extends AbstractProcessor {
         this.config = TikaConfig.getDefaultConfig();
     }

-    @Override
-    protected void init(final ProcessorInitializationContext context) {
-
-        final List<PropertyDescriptor> properties = new ArrayList<>();
-        properties.add(USE_FILENAME_IN_DETECTION);
-        properties.add(CONFIG_STRATEGY);
-        properties.add(MIME_CONFIG_BODY);
-        properties.add(MIME_CONFIG_FILE);
-        this.properties = Collections.unmodifiableList(properties);
-
-        final Set<Relationship> rels = new HashSet<>();
-        rels.add(REL_SUCCESS);
-        this.relationships = Collections.unmodifiableSet(rels);
-    }
-
     @Override
     public void migrateProperties(PropertyConfiguration config) {
         if (!config.hasProperty(CONFIG_STRATEGY)) {

@@ -212,12 +200,12 @@ public class IdentifyMimeType extends AbstractProcessor {

     @Override
     public Set<Relationship> getRelationships() {
-        return relationships;
+        return RELATIONSHIPS;
     }

     @Override
     protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-        return properties;
+        return PROPERTIES;
     }

     @Override

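Worth keeping in mind when reviewing these replacements: List.of and Set.of are not behaviorally identical to every collection they replace. They reject null elements with a NullPointerException, Set.of rejects duplicate elements with an IllegalArgumentException at construction time, and the results are unmodifiable. A small self-contained illustration:

    import java.util.List;
    import java.util.Set;

    class ImmutableFactoryNotes {
        static void demo() {
            final List<String> names = List.of("a", "b");
            final Set<String> unique = Set.of("a", "b");
            // names.add("c");      // UnsupportedOperationException: the list is unmodifiable
            // List.of("a", null);  // NullPointerException: null elements are rejected
            // Set.of("a", "a");    // IllegalArgumentException: duplicates rejected at creation
        }
    }

None of the replaced collections in this commit hold nulls or duplicates, so the swaps are safe.
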
@@ -109,7 +109,6 @@ import java.time.format.DateTimeFormatter;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;

@@ -177,7 +176,7 @@ public class InvokeHTTP extends AbstractProcessor {
     private static final Pattern FORM_DATA_NAME_PARAMETER_PATTERN = Pattern.compile("post:form:(?<formDataName>.*)$");
     private static final String FORM_DATA_NAME_GROUP = "formDataName";

-    private static final Set<String> IGNORED_REQUEST_ATTRIBUTES = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(
+    private static final Set<String> IGNORED_REQUEST_ATTRIBUTES = Set.of(
             STATUS_CODE,
             STATUS_MESSAGE,
             RESPONSE_BODY,

@@ -189,7 +188,7 @@ public class InvokeHTTP extends AbstractProcessor {
             EXCEPTION_MESSAGE,
             CoreAttributes.UUID.key(),
             CoreAttributes.PATH.key()
-    )));
+    );

     public static final PropertyDescriptor HTTP_METHOD = new PropertyDescriptor.Builder()
             .name("HTTP Method")

@@ -345,7 +344,7 @@ public class InvokeHTTP extends AbstractProcessor {
             .name("Request Content-Encoding")
             .description("HTTP Content-Encoding applied to request body during transmission. The receiving server must support the selected encoding to avoid request failures.")
             .required(true)
-            .defaultValue(ContentEncodingStrategy.DISABLED.getValue())
+            .defaultValue(ContentEncodingStrategy.DISABLED)
             .allowableValues(ContentEncodingStrategy.class)
             .dependsOn(HTTP_METHOD, HttpMethod.PATCH.name(), HttpMethod.POST.name(), HttpMethod.PUT.name())
             .build();

@@ -656,7 +655,7 @@ public class InvokeHTTP extends AbstractProcessor {
                 } else if (oldValue == null) { // new property
                     newDynamicPropertyNames.add(descriptor.getName());
                 }
-                this.dynamicPropertyNames = Collections.unmodifiableSet(newDynamicPropertyNames);
+                this.dynamicPropertyNames = Set.copyOf(newDynamicPropertyNames);
             } else {
                 // compile the attributes-to-send filter pattern
                 if (REQUEST_HEADER_ATTRIBUTES_PATTERN.getName().equalsIgnoreCase(descriptor.getName())) {

@@ -753,7 +752,7 @@ public class InvokeHTTP extends AbstractProcessor {
         }

         if (context.getProperty(HTTP2_DISABLED).asBoolean()) {
-            okHttpClientBuilder.protocols(Collections.singletonList(Protocol.HTTP_1_1));
+            okHttpClientBuilder.protocols(List.of(Protocol.HTTP_1_1));
         }

         okHttpClientBuilder.followRedirects(context.getProperty(RESPONSE_REDIRECTS_ENABLED).asBoolean());

@@ -1033,8 +1032,8 @@ public class InvokeHTTP extends AbstractProcessor {
             }
         }

-        final String contentEncoding = context.getProperty(REQUEST_CONTENT_ENCODING).getValue();
-        final ContentEncodingStrategy contentEncodingStrategy = ContentEncodingStrategy.valueOf(contentEncoding);
+        final ContentEncodingStrategy contentEncodingStrategy =
+                context.getProperty(REQUEST_CONTENT_ENCODING).asAllowableValue(ContentEncodingStrategy.class);
         if (ContentEncodingStrategy.GZIP == contentEncodingStrategy) {
             requestBuilder.addHeader(HttpHeader.CONTENT_ENCODING.getHeader(), ContentEncodingStrategy.GZIP.getValue().toLowerCase());
         }

@@ -1111,7 +1110,7 @@ public class InvokeHTTP extends AbstractProcessor {
             }
         };

-        if (propertyDescriptors.size() > 0 || StringUtils.isNotEmpty(formDataName)) {
+        if (!propertyDescriptors.isEmpty() || StringUtils.isNotEmpty(formDataName)) {
             // we have form data
             MultipartBody.Builder builder = new Builder().setType(MultipartBody.FORM);
             boolean useFileName = context.getProperty(REQUEST_FORM_DATA_FILENAME_ENABLED).asBoolean();

@@ -1248,7 +1247,7 @@ public class InvokeHTTP extends AbstractProcessor {
                 sb.append(entry.getKey());
                 sb.append(": ");
                 if (list.size() == 1) {
-                    sb.append(list.get(0));
+                    sb.append(list.getFirst());
                 } else {
                     sb.append(list);
                 }

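The Set.copyOf call above, replacing Collections.unmodifiableSet for the dynamic property names, also shifts semantics slightly: Collections.unmodifiableSet returns a live read-only view of the backing set, while Set.copyOf (Java 10) takes an independent snapshot. Since the source set here is a freshly built local HashSet, both behave the same, but the snapshot form cannot leak later mutations. A sketch:

    import java.util.Collections;
    import java.util.HashSet;
    import java.util.Set;

    class CopyVersusView {
        static void demo() {
            final Set<String> source = new HashSet<>(Set.of("x-dynamic-1"));

            final Set<String> snapshot = Set.copyOf(source);              // independent copy
            final Set<String> view = Collections.unmodifiableSet(source); // live read-only view

            source.add("x-dynamic-2");
            // snapshot still holds one element; view now reports two
        }
    }
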
@@ -71,11 +71,8 @@ import org.apache.nifi.util.db.JdbcProperties;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
-import java.util.Arrays;
 import java.util.Collection;
-import java.util.Collections;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Queue;

@@ -189,18 +186,18 @@ public class JoinEnrichment extends BinFiles {
             .defaultValue("10000")
             .build();

-    private static final List<PropertyDescriptor> properties = Collections.unmodifiableList(Arrays.asList(
-            ORIGINAL_RECORD_READER,
-            ENRICHMENT_RECORD_READER,
-            RECORD_WRITER,
-            JOIN_STRATEGY,
-            SQL,
-            DEFAULT_PRECISION,
-            DEFAULT_SCALE,
-            INSERTION_RECORD_PATH,
-            MAX_BIN_COUNT,
-            TIMEOUT
-    ));
+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
+            ORIGINAL_RECORD_READER,
+            ENRICHMENT_RECORD_READER,
+            RECORD_WRITER,
+            JOIN_STRATEGY,
+            SQL,
+            DEFAULT_PRECISION,
+            DEFAULT_SCALE,
+            INSERTION_RECORD_PATH,
+            MAX_BIN_COUNT,
+            TIMEOUT
+    );

     // Relationships
     static final Relationship REL_JOINED = new Relationship.Builder()

@@ -223,23 +220,23 @@ public class JoinEnrichment extends BinFiles {
                     "relationship.")
             .build();

-    private static final Set<Relationship> relationships = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(
-            REL_JOINED,
-            REL_ORIGINAL,
-            REL_TIMEOUT,
-            REL_FAILURE
-    )));
+    private static final Set<Relationship> RELATIONSHIPS = Set.of(
+            REL_JOINED,
+            REL_ORIGINAL,
+            REL_TIMEOUT,
+            REL_FAILURE
+    );

     private final SqlJoinCache sqlJoinCache = new SqlJoinCache(getLogger());

     @Override
     protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-        return properties;
+        return PROPERTIES;
     }

     @Override
     public Set<Relationship> getRelationships() {
-        return relationships;
+        return RELATIONSHIPS;
     }

     @OnStopped

@@ -307,10 +304,10 @@ public class JoinEnrichment extends BinFiles {

         session.transfer(flowFiles, REL_TIMEOUT);

-        final FlowFile flowFile = flowFiles.get(0);
+        final FlowFile flowFile = flowFiles.getFirst();
         final EnrichmentRole role = getEnrichmentRole(flowFile);
         final String missingType = (role == null) ? "other" : getOtherEnrichmentRole(role).name();
-        getLogger().warn("Timed out waiting for the {} FlowFile to match {}; routing to {}", missingType, flowFiles.get(0), REL_TIMEOUT.getName());
+        getLogger().warn("Timed out waiting for the {} FlowFile to match {}; routing to {}", missingType, flowFiles.getFirst(), REL_TIMEOUT.getName());
         session.commitAsync();

         return new BinProcessingResult(true);

@@ -413,8 +410,7 @@ public class JoinEnrichment extends BinFiles {
         try (final InputStream rawIn = session.read(flowFile)) {
             final Map<String, String> enrichmentAttributes = flowFile.getAttributes();
             final RecordReader reader = recordReaderFactory.createRecordReader(enrichmentAttributes, rawIn, flowFile.getSize(), getLogger());
-            final RecordSchema schema = reader.getSchema();
-            return schema;
+            return reader.getSchema();
         }
     }

@@ -466,20 +462,12 @@ public class JoinEnrichment extends BinFiles {
     }

     private EnrichmentRole getOtherEnrichmentRole(final EnrichmentRole role) {
-        if (role == null) {
-            return null;
-        }
-
-        switch (role) {
-            case ENRICHMENT:
-                return EnrichmentRole.ORIGINAL;
-            case ORIGINAL:
-                return EnrichmentRole.ENRICHMENT;
-            case UNKNOWN:
-                return EnrichmentRole.UNKNOWN;
-        }
-
-        return null;
+        return switch (role) {
+            case ENRICHMENT -> EnrichmentRole.ORIGINAL;
+            case ORIGINAL -> EnrichmentRole.ENRICHMENT;
+            case UNKNOWN -> EnrichmentRole.UNKNOWN;
+            case null -> null;
+        };
     }

     @Override

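The rewritten getOtherEnrichmentRole above leans on Java 21 pattern matching for switch: the case null arm folds the old explicit null pre-check into the switch itself, since a switch without case null throws a NullPointerException when the selector is null. Condensed from the hunk above:

    class EnrichmentRoles {
        enum EnrichmentRole { ORIGINAL, ENRICHMENT, UNKNOWN }

        static EnrichmentRole other(final EnrichmentRole role) {
            return switch (role) {
                case ENRICHMENT -> EnrichmentRole.ORIGINAL;
                case ORIGINAL -> EnrichmentRole.ENRICHMENT;
                case UNKNOWN -> EnrichmentRole.UNKNOWN;
                case null -> null; // without this arm, switch (null) would throw NullPointerException
            };
        }
    }
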
@@ -60,10 +60,8 @@ import java.sql.DatabaseMetaData;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;

@@ -148,12 +146,6 @@ public class ListDatabaseTables extends AbstractProcessor {
     public static final String DB_TABLE_REMARKS = "db.table.remarks";
     public static final String DB_TABLE_COUNT = "db.table.count";

-    // Relationships
-    public static final Relationship REL_SUCCESS = new Relationship.Builder()
-            .name("success")
-            .description("All FlowFiles that are received are routed to success")
-            .build();
-
     // Property descriptors
     public static final PropertyDescriptor DBCP_SERVICE = new PropertyDescriptor.Builder()
             .name("list-db-tables-db-connection")

@@ -235,39 +227,33 @@ public class ListDatabaseTables extends AbstractProcessor {
             .identifiesControllerService(RecordSetWriterFactory.class)
             .build();

+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
+            DBCP_SERVICE,
+            CATALOG,
+            SCHEMA_PATTERN,
+            TABLE_NAME_PATTERN,
+            TABLE_TYPES,
+            INCLUDE_COUNT,
+            RECORD_WRITER,
+            REFRESH_INTERVAL
+    );

-    private static final List<PropertyDescriptor> propertyDescriptors;
-    private static final Set<Relationship> relationships;
+    // Relationships
+    public static final Relationship REL_SUCCESS = new Relationship.Builder()
+            .name("success")
+            .description("All FlowFiles that are received are routed to success")
+            .build();

-    /*
-     * Will ensure that the list of property descriptors is build only once.
-     * Will also create a Set of relationships
-     */
-    static {
-        final List<PropertyDescriptor> _propertyDescriptors = new ArrayList<>();
-        _propertyDescriptors.add(DBCP_SERVICE);
-        _propertyDescriptors.add(CATALOG);
-        _propertyDescriptors.add(SCHEMA_PATTERN);
-        _propertyDescriptors.add(TABLE_NAME_PATTERN);
-        _propertyDescriptors.add(TABLE_TYPES);
-        _propertyDescriptors.add(INCLUDE_COUNT);
-        _propertyDescriptors.add(RECORD_WRITER);
-        _propertyDescriptors.add(REFRESH_INTERVAL);
-        propertyDescriptors = Collections.unmodifiableList(_propertyDescriptors);
-
-        final Set<Relationship> _relationships = new HashSet<>();
-        _relationships.add(REL_SUCCESS);
-        relationships = Collections.unmodifiableSet(_relationships);
-    }
+    private static final Set<Relationship> RELATIONSHIPS = Set.of(REL_SUCCESS);

     @Override
     protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-        return propertyDescriptors;
+        return PROPERTIES;
     }

     @Override
     public Set<Relationship> getRelationships() {
-        return relationships;
+        return RELATIONSHIPS;
     }

     @Override

@@ -437,7 +423,6 @@ public class ListDatabaseTables extends AbstractProcessor {
     }

     static class RecordTableListingWriter implements TableListingWriter {
-        private static final RecordSchema RECORD_SCHEMA;
         public static final String TABLE_NAME = "tableName";
         public static final String TABLE_CATALOG = "catalog";
         public static final String TABLE_SCHEMA = "schemaName";

@@ -446,19 +431,15 @@ public class ListDatabaseTables extends AbstractProcessor {
         public static final String TABLE_REMARKS = "remarks";
         public static final String TABLE_ROW_COUNT = "rowCount";

-        static {
-            final List<RecordField> fields = new ArrayList<>();
-            fields.add(new RecordField(TABLE_NAME, RecordFieldType.STRING.getDataType(), false));
-            fields.add(new RecordField(TABLE_CATALOG, RecordFieldType.STRING.getDataType()));
-            fields.add(new RecordField(TABLE_SCHEMA, RecordFieldType.STRING.getDataType()));
-            fields.add(new RecordField(TABLE_FULLNAME, RecordFieldType.STRING.getDataType(), false));
-            fields.add(new RecordField(TABLE_TYPE, RecordFieldType.STRING.getDataType(), false));
-            fields.add(new RecordField(TABLE_REMARKS, RecordFieldType.STRING.getDataType(), false));
-            fields.add(new RecordField(TABLE_ROW_COUNT, RecordFieldType.LONG.getDataType(), false));
-            RECORD_SCHEMA = new SimpleRecordSchema(fields);
-        }
+        private static final RecordSchema RECORD_SCHEMA = new SimpleRecordSchema(List.of(
+                new RecordField(TABLE_NAME, RecordFieldType.STRING.getDataType(), false),
+                new RecordField(TABLE_CATALOG, RecordFieldType.STRING.getDataType()),
+                new RecordField(TABLE_SCHEMA, RecordFieldType.STRING.getDataType()),
+                new RecordField(TABLE_FULLNAME, RecordFieldType.STRING.getDataType(), false),
+                new RecordField(TABLE_TYPE, RecordFieldType.STRING.getDataType(), false),
+                new RecordField(TABLE_REMARKS, RecordFieldType.STRING.getDataType(), false),
+                new RecordField(TABLE_ROW_COUNT, RecordFieldType.LONG.getDataType(), false)
+        ));

         private final ProcessSession session;
         private final RecordSetWriterFactory writerFactory;

@@ -496,6 +477,7 @@ public class ListDatabaseTables extends AbstractProcessor {
             if (writeResult.getRecordCount() == 0) {
                 session.remove(flowFile);
             } else {
+                // todo 13590
                 final Map<String, String> attributes = new HashMap<>(writeResult.getAttributes());
                 attributes.put("record.count", String.valueOf(writeResult.getRecordCount()));
                 flowFile = session.putAllAttributes(flowFile, attributes);

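The record schema above moves from a static initializer block into an inline List.of initializer. Reading the NiFi record API as used here, the three-argument RecordField constructor's trailing boolean is the field's nullability, so false marks a required field and the two-argument form leaves the field nullable — a trimmed sketch of the pattern with shortened literals:

    private static final RecordSchema RECORD_SCHEMA = new SimpleRecordSchema(List.of(
            new RecordField("tableName", RecordFieldType.STRING.getDataType(), false), // required
            new RecordField("catalog", RecordFieldType.STRING.getDataType())           // nullable
    ));
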
@@ -34,15 +34,14 @@ import org.apache.nifi.components.ValidationResult;
 import org.apache.nifi.components.state.Scope;
 import org.apache.nifi.context.PropertyContext;
 import org.apache.nifi.processor.ProcessContext;
+import org.apache.nifi.processor.util.file.transfer.FileTransfer;
 import org.apache.nifi.processor.util.file.transfer.ListFileTransfer;
 import org.apache.nifi.processor.util.list.ListedEntityTracker;
 import org.apache.nifi.processors.standard.util.FTPTransfer;
-import org.apache.nifi.processor.util.file.transfer.FileTransfer;
+import org.apache.nifi.scheduling.SchedulingStrategy;

-import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
-import org.apache.nifi.scheduling.SchedulingStrategy;

 @PrimaryNodeOnly
 @TriggerSerially

@@ -71,42 +70,45 @@ import org.apache.nifi.scheduling.SchedulingStrategy;
 @DefaultSchedule(strategy = SchedulingStrategy.TIMER_DRIVEN, period = "1 min")
 public class ListFTP extends ListFileTransfer {

+    private static final PropertyDescriptor PORT =
+            new PropertyDescriptor.Builder().fromPropertyDescriptor(UNDEFAULTED_PORT).defaultValue("21").build();
+
+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
+            FILE_TRANSFER_LISTING_STRATEGY,
+            HOSTNAME,
+            PORT,
+            USERNAME,
+            FTPTransfer.PASSWORD,
+            REMOTE_PATH,
+            RECORD_WRITER,
+            DISTRIBUTED_CACHE_SERVICE,
+            FTPTransfer.RECURSIVE_SEARCH,
+            FTPTransfer.FOLLOW_SYMLINK,
+            FTPTransfer.FILE_FILTER_REGEX,
+            FTPTransfer.PATH_FILTER_REGEX,
+            FTPTransfer.IGNORE_DOTTED_FILES,
+            FTPTransfer.REMOTE_POLL_BATCH_SIZE,
+            FTPTransfer.CONNECTION_TIMEOUT,
+            FTPTransfer.DATA_TIMEOUT,
+            FTPTransfer.CONNECTION_MODE,
+            FTPTransfer.TRANSFER_MODE,
+            FTPTransfer.PROXY_CONFIGURATION_SERVICE,
+            FTPTransfer.PROXY_TYPE,
+            FTPTransfer.PROXY_HOST,
+            FTPTransfer.PROXY_PORT,
+            FTPTransfer.HTTP_PROXY_USERNAME,
+            FTPTransfer.HTTP_PROXY_PASSWORD,
+            FTPTransfer.BUFFER_SIZE,
+            TARGET_SYSTEM_TIMESTAMP_PRECISION,
+            ListedEntityTracker.TRACKING_STATE_CACHE,
+            ListedEntityTracker.TRACKING_TIME_WINDOW,
+            ListedEntityTracker.INITIAL_LISTING_TARGET,
+            FTPTransfer.UTF8_ENCODING
+    );
+
     @Override
     protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-        final PropertyDescriptor port = new PropertyDescriptor.Builder().fromPropertyDescriptor(UNDEFAULTED_PORT).defaultValue("21").build();
-
-        final List<PropertyDescriptor> properties = new ArrayList<>();
-        properties.add(FILE_TRANSFER_LISTING_STRATEGY);
-        properties.add(HOSTNAME);
-        properties.add(port);
-        properties.add(USERNAME);
-        properties.add(FTPTransfer.PASSWORD);
-        properties.add(REMOTE_PATH);
-        properties.add(RECORD_WRITER);
-        properties.add(DISTRIBUTED_CACHE_SERVICE);
-        properties.add(FTPTransfer.RECURSIVE_SEARCH);
-        properties.add(FTPTransfer.FOLLOW_SYMLINK);
-        properties.add(FTPTransfer.FILE_FILTER_REGEX);
-        properties.add(FTPTransfer.PATH_FILTER_REGEX);
-        properties.add(FTPTransfer.IGNORE_DOTTED_FILES);
-        properties.add(FTPTransfer.REMOTE_POLL_BATCH_SIZE);
-        properties.add(FTPTransfer.CONNECTION_TIMEOUT);
-        properties.add(FTPTransfer.DATA_TIMEOUT);
-        properties.add(FTPTransfer.CONNECTION_MODE);
-        properties.add(FTPTransfer.TRANSFER_MODE);
-        properties.add(FTPTransfer.PROXY_CONFIGURATION_SERVICE);
-        properties.add(FTPTransfer.PROXY_TYPE);
-        properties.add(FTPTransfer.PROXY_HOST);
-        properties.add(FTPTransfer.PROXY_PORT);
-        properties.add(FTPTransfer.HTTP_PROXY_USERNAME);
-        properties.add(FTPTransfer.HTTP_PROXY_PASSWORD);
-        properties.add(FTPTransfer.BUFFER_SIZE);
-        properties.add(TARGET_SYSTEM_TIMESTAMP_PRECISION);
-        properties.add(ListedEntityTracker.TRACKING_STATE_CACHE);
-        properties.add(ListedEntityTracker.TRACKING_TIME_WINDOW);
-        properties.add(ListedEntityTracker.INITIAL_LISTING_TARGET);
-        properties.add(FTPTransfer.UTF8_ENCODING);
-        return properties;
+        return PROPERTIES;
     }

     @Override

@@ -130,5 +132,4 @@ public class ListFTP extends ListFileTransfer {
     protected void customValidate(ValidationContext validationContext, Collection<ValidationResult> results) {
         FTPTransfer.validateProxySpec(validationContext, results);
     }
-
 }

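One detail above beyond the usual list refactoring: the PORT descriptor is derived from the shared UNDEFAULTED_PORT via fromPropertyDescriptor, overriding only the default value, and is hoisted into a constant so it is built once rather than on every getSupportedPropertyDescriptors() call:

    private static final PropertyDescriptor PORT = new PropertyDescriptor.Builder()
            .fromPropertyDescriptor(UNDEFAULTED_PORT) // copy the shared descriptor's settings
            .defaultValue("21")                       // FTP's default control port
            .build();
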
@@ -42,9 +42,9 @@ import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.processor.util.file.transfer.FileInfo;
 import org.apache.nifi.processor.util.list.AbstractListProcessor;
 import org.apache.nifi.processor.util.list.ListedEntityTracker;
-import org.apache.nifi.processor.util.file.transfer.FileInfo;
 import org.apache.nifi.scheduling.SchedulingStrategy;
 import org.apache.nifi.serialization.record.RecordSchema;
 import org.apache.nifi.util.Tuple;

@@ -67,12 +67,10 @@ import java.time.Instant;
 import java.time.ZoneId;
 import java.time.ZonedDateTime;
 import java.time.format.DateTimeFormatter;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Collections;
 import java.util.Date;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.Iterator;
 import java.util.LinkedHashMap;
 import java.util.LinkedList;

@@ -269,9 +267,32 @@ public class ListFile extends AbstractListProcessor<FileInfo> {
             .defaultValue("3 mins")
             .build();

+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
+            DIRECTORY,
+            LISTING_STRATEGY,
+            RECURSE,
+            RECORD_WRITER,
+            DIRECTORY_LOCATION,
+            FILE_FILTER,
+            PATH_FILTER,
+            INCLUDE_FILE_ATTRIBUTES,
+            MIN_AGE,
+            MAX_AGE,
+            MIN_SIZE,
+            MAX_SIZE,
+            IGNORE_HIDDEN_FILES,
+            TARGET_SYSTEM_TIMESTAMP_PRECISION,
+            ListedEntityTracker.TRACKING_STATE_CACHE,
+            ListedEntityTracker.TRACKING_TIME_WINDOW,
+            ListedEntityTracker.INITIAL_LISTING_TARGET,
+            ListedEntityTracker.NODE_IDENTIFIER,
+            TRACK_PERFORMANCE,
+            MAX_TRACKED_FILES,
+            MAX_DISK_OPERATION_TIME,
+            MAX_LISTING_TIME
+    );
+
-    private List<PropertyDescriptor> properties;
-    private Set<Relationship> relationships;
+    private static final Set<Relationship> RELATIONSHIPS = Set.of(REL_SUCCESS);

     private volatile ScheduledExecutorService monitoringThreadPool;
     private volatile Future<?> monitoringFuture;

@@ -292,35 +313,6 @@ public class ListFile extends AbstractListProcessor<FileInfo> {

     @Override
     protected void init(final ProcessorInitializationContext context) {
-        final List<PropertyDescriptor> properties = new ArrayList<>();
-        properties.add(DIRECTORY);
-        properties.add(LISTING_STRATEGY);
-        properties.add(RECURSE);
-        properties.add(RECORD_WRITER);
-        properties.add(DIRECTORY_LOCATION);
-        properties.add(FILE_FILTER);
-        properties.add(PATH_FILTER);
-        properties.add(INCLUDE_FILE_ATTRIBUTES);
-        properties.add(MIN_AGE);
-        properties.add(MAX_AGE);
-        properties.add(MIN_SIZE);
-        properties.add(MAX_SIZE);
-        properties.add(IGNORE_HIDDEN_FILES);
-        properties.add(TARGET_SYSTEM_TIMESTAMP_PRECISION);
-        properties.add(ListedEntityTracker.TRACKING_STATE_CACHE);
-        properties.add(ListedEntityTracker.TRACKING_TIME_WINDOW);
-        properties.add(ListedEntityTracker.INITIAL_LISTING_TARGET);
-        properties.add(ListedEntityTracker.NODE_IDENTIFIER);
-        properties.add(TRACK_PERFORMANCE);
-        properties.add(MAX_TRACKED_FILES);
-        properties.add(MAX_DISK_OPERATION_TIME);
-        properties.add(MAX_LISTING_TIME);
-        this.properties = Collections.unmodifiableList(properties);
-
-        final Set<Relationship> relationships = new HashSet<>();
-        relationships.add(REL_SUCCESS);
-        this.relationships = Collections.unmodifiableSet(relationships);
-
         monitoringThreadPool = Executors.newScheduledThreadPool(1, r -> {
             final Thread t = Executors.defaultThreadFactory().newThread(r);
             t.setName("Monitor ListFile Performance [UUID=" + context.getIdentifier() + "]");

@@ -332,12 +324,12 @@ public class ListFile extends AbstractListProcessor<FileInfo> {

     @Override
     protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-        return properties;
+        return PROPERTIES;
     }

     @Override
     public Set<Relationship> getRelationships() {
-        return relationships;
+        return RELATIONSHIPS;
     }

     @OnScheduled

@@ -582,7 +574,7 @@ public class ListFile extends AbstractListProcessor<FileInfo> {
         final long start = System.currentTimeMillis();
         final List<FileInfo> result = new LinkedList<>();

-        Files.walkFileTree(basePath, Collections.singleton(FileVisitOption.FOLLOW_LINKS), maxDepth, new FileVisitor<Path>() {
+        Files.walkFileTree(basePath, Set.of(FileVisitOption.FOLLOW_LINKS), maxDepth, new FileVisitor<Path>() {
             @Override
             public FileVisitResult preVisitDirectory(final Path dir, final BasicFileAttributes attributes) {
                 if (Files.isReadable(dir)) {

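The walkFileTree change works because the options parameter of Files.walkFileTree is declared as Set<FileVisitOption>, making the immutable Set.of(...) a drop-in replacement for Collections.singleton(...). A self-contained sketch of the call shape, with the visitor reduced to a no-op:

    import java.io.IOException;
    import java.nio.file.FileVisitOption;
    import java.nio.file.FileVisitResult;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.SimpleFileVisitor;
    import java.nio.file.attribute.BasicFileAttributes;
    import java.util.Set;

    class WalkSketch {
        static void walk(final Path basePath, final int maxDepth) throws IOException {
            Files.walkFileTree(basePath, Set.of(FileVisitOption.FOLLOW_LINKS), maxDepth,
                    new SimpleFileVisitor<>() {
                        @Override
                        public FileVisitResult visitFile(final Path file, final BasicFileAttributes attrs) {
                            return FileVisitResult.CONTINUE; // collect or filter files here
                        }
                    });
        }
    }
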
@@ -36,21 +36,20 @@ import org.apache.nifi.components.state.Scope;
 import org.apache.nifi.context.PropertyContext;
 import org.apache.nifi.processor.DataUnit;
 import org.apache.nifi.processor.ProcessContext;
+import org.apache.nifi.processor.util.file.transfer.FileInfo;
+import org.apache.nifi.processor.util.file.transfer.FileTransfer;
 import org.apache.nifi.processor.util.file.transfer.ListFileTransfer;
 import org.apache.nifi.processor.util.list.ListedEntityTracker;
 import org.apache.nifi.processors.standard.util.FTPTransfer;
-import org.apache.nifi.processor.util.file.transfer.FileInfo;
-import org.apache.nifi.processor.util.file.transfer.FileTransfer;
 import org.apache.nifi.processors.standard.util.SFTPTransfer;
+import org.apache.nifi.scheduling.SchedulingStrategy;

 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
 import java.util.concurrent.TimeUnit;
 import java.util.function.Predicate;
 import java.util.stream.Collectors;
-import org.apache.nifi.scheduling.SchedulingStrategy;

 @PrimaryNodeOnly
 @TriggerSerially

@@ -80,52 +79,54 @@ import org.apache.nifi.scheduling.SchedulingStrategy;
 @DefaultSchedule(strategy = SchedulingStrategy.TIMER_DRIVEN, period = "1 min")
 public class ListSFTP extends ListFileTransfer {

+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
+            FILE_TRANSFER_LISTING_STRATEGY,
+            SFTPTransfer.HOSTNAME,
+            SFTPTransfer.PORT,
+            SFTPTransfer.USERNAME,
+            SFTPTransfer.PASSWORD,
+            SFTPTransfer.PRIVATE_KEY_PATH,
+            SFTPTransfer.PRIVATE_KEY_PASSPHRASE,
+            REMOTE_PATH,
+            RECORD_WRITER,
+            DISTRIBUTED_CACHE_SERVICE,
+            SFTPTransfer.RECURSIVE_SEARCH,
+            SFTPTransfer.FOLLOW_SYMLINK,
+            SFTPTransfer.FILE_FILTER_REGEX,
+            SFTPTransfer.PATH_FILTER_REGEX,
+            SFTPTransfer.IGNORE_DOTTED_FILES,
+            SFTPTransfer.REMOTE_POLL_BATCH_SIZE,
+            SFTPTransfer.STRICT_HOST_KEY_CHECKING,
+            SFTPTransfer.HOST_KEY_FILE,
+            SFTPTransfer.CONNECTION_TIMEOUT,
+            SFTPTransfer.DATA_TIMEOUT,
+            SFTPTransfer.USE_KEEPALIVE_ON_TIMEOUT,
+            TARGET_SYSTEM_TIMESTAMP_PRECISION,
+            SFTPTransfer.USE_COMPRESSION,
+            SFTPTransfer.PROXY_CONFIGURATION_SERVICE,
+            FTPTransfer.PROXY_TYPE,
+            FTPTransfer.PROXY_HOST,
+            FTPTransfer.PROXY_PORT,
+            FTPTransfer.HTTP_PROXY_USERNAME,
+            FTPTransfer.HTTP_PROXY_PASSWORD,
+            ListedEntityTracker.TRACKING_STATE_CACHE,
+            ListedEntityTracker.TRACKING_TIME_WINDOW,
+            ListedEntityTracker.INITIAL_LISTING_TARGET,
+            ListFile.MIN_AGE,
+            ListFile.MAX_AGE,
+            ListFile.MIN_SIZE,
+            ListFile.MAX_SIZE,
+            SFTPTransfer.CIPHERS_ALLOWED,
+            SFTPTransfer.KEY_ALGORITHMS_ALLOWED,
+            SFTPTransfer.KEY_EXCHANGE_ALGORITHMS_ALLOWED,
+            SFTPTransfer.MESSAGE_AUTHENTICATION_CODES_ALLOWED
+    );
+
     private volatile Predicate<FileInfo> fileFilter;

     @Override
     protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-        final List<PropertyDescriptor> properties = new ArrayList<>();
-        properties.add(FILE_TRANSFER_LISTING_STRATEGY);
-        properties.add(SFTPTransfer.HOSTNAME);
-        properties.add(SFTPTransfer.PORT);
-        properties.add(SFTPTransfer.USERNAME);
-        properties.add(SFTPTransfer.PASSWORD);
-        properties.add(SFTPTransfer.PRIVATE_KEY_PATH);
-        properties.add(SFTPTransfer.PRIVATE_KEY_PASSPHRASE);
-        properties.add(REMOTE_PATH);
-        properties.add(RECORD_WRITER);
-        properties.add(DISTRIBUTED_CACHE_SERVICE);
-        properties.add(SFTPTransfer.RECURSIVE_SEARCH);
-        properties.add(SFTPTransfer.FOLLOW_SYMLINK);
-        properties.add(SFTPTransfer.FILE_FILTER_REGEX);
-        properties.add(SFTPTransfer.PATH_FILTER_REGEX);
-        properties.add(SFTPTransfer.IGNORE_DOTTED_FILES);
-        properties.add(SFTPTransfer.REMOTE_POLL_BATCH_SIZE);
-        properties.add(SFTPTransfer.STRICT_HOST_KEY_CHECKING);
-        properties.add(SFTPTransfer.HOST_KEY_FILE);
-        properties.add(SFTPTransfer.CONNECTION_TIMEOUT);
-        properties.add(SFTPTransfer.DATA_TIMEOUT);
-        properties.add(SFTPTransfer.USE_KEEPALIVE_ON_TIMEOUT);
-        properties.add(TARGET_SYSTEM_TIMESTAMP_PRECISION);
-        properties.add(SFTPTransfer.USE_COMPRESSION);
-        properties.add(SFTPTransfer.PROXY_CONFIGURATION_SERVICE);
-        properties.add(FTPTransfer.PROXY_TYPE);
-        properties.add(FTPTransfer.PROXY_HOST);
-        properties.add(FTPTransfer.PROXY_PORT);
-        properties.add(FTPTransfer.HTTP_PROXY_USERNAME);
-        properties.add(FTPTransfer.HTTP_PROXY_PASSWORD);
-        properties.add(ListedEntityTracker.TRACKING_STATE_CACHE);
-        properties.add(ListedEntityTracker.TRACKING_TIME_WINDOW);
-        properties.add(ListedEntityTracker.INITIAL_LISTING_TARGET);
-        properties.add(ListFile.MIN_AGE);
-        properties.add(ListFile.MAX_AGE);
-        properties.add(ListFile.MIN_SIZE);
-        properties.add(ListFile.MAX_SIZE);
-        properties.add(SFTPTransfer.CIPHERS_ALLOWED);
-        properties.add(SFTPTransfer.KEY_ALGORITHMS_ALLOWED);
-        properties.add(SFTPTransfer.KEY_EXCHANGE_ALGORITHMS_ALLOWED);
-        properties.add(SFTPTransfer.MESSAGE_AUTHENTICATION_CODES_ALLOWED);
-        return properties;
+        return PROPERTIES;
     }

     @Override

@@ -41,10 +41,7 @@ import org.apache.nifi.ssl.SSLContextService;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collection;
-import java.util.Collections;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.CountDownLatch;

@@ -78,11 +75,6 @@ public class ListenFTP extends AbstractSessionFactoryProcessor {
             .identifiesControllerService(SSLContextService.class)
             .build();

-    public static final Relationship RELATIONSHIP_SUCCESS = new Relationship.Builder()
-            .name("success")
-            .description("Relationship for successfully received files.")
-            .build();
-
     public static final PropertyDescriptor BIND_ADDRESS = new PropertyDescriptor.Builder()
             .name("bind-address")
             .displayName("Bind Address")

@@ -125,17 +117,20 @@ public class ListenFTP extends AbstractSessionFactoryProcessor {
             .sensitive(true)
             .build();

-    private static final List<PropertyDescriptor> PROPERTIES = Collections.unmodifiableList(Arrays.asList(
+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
             BIND_ADDRESS,
             PORT,
             USERNAME,
             PASSWORD,
             SSL_CONTEXT_SERVICE
-    ));
+    );

-    private static final Set<Relationship> RELATIONSHIPS = Collections.unmodifiableSet(new HashSet<>(Collections.singletonList(
-            RELATIONSHIP_SUCCESS
-    )));
+    public static final Relationship RELATIONSHIP_SUCCESS = new Relationship.Builder()
+            .name("success")
+            .description("Relationship for successfully received files.")
+            .build();
+
+    private static final Set<Relationship> RELATIONSHIPS = Set.of(RELATIONSHIP_SUCCESS);

     private volatile FtpServer ftpServer;
     private volatile CountDownLatch sessionFactorySetSignal;

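Note the reordering above: RELATIONSHIP_SUCCESS now precedes the RELATIONSHIPS set that references it. Static fields initialize in declaration order, and referencing a later static field by its simple name in an initializer is an illegal forward reference, so the constant has to come first:

    public static final Relationship RELATIONSHIP_SUCCESS = new Relationship.Builder()
            .name("success")
            .description("Relationship for successfully received files.")
            .build();

    // Legal only because RELATIONSHIP_SUCCESS is declared above this line.
    private static final Set<Relationship> RELATIONSHIPS = Set.of(RELATIONSHIP_SUCCESS);
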
@@ -16,6 +16,9 @@
  */
 package org.apache.nifi.processors.standard;

+import jakarta.servlet.Servlet;
+import jakarta.servlet.http.HttpServletResponse;
+import jakarta.ws.rs.Path;
 import org.apache.nifi.annotation.behavior.InputRequirement;
 import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;

@@ -52,21 +55,17 @@ import org.apache.nifi.ssl.RestrictedSSLContextService;
 import org.apache.nifi.ssl.SSLContextService;
 import org.apache.nifi.stream.io.LeakyBucketStreamThrottler;
 import org.apache.nifi.stream.io.StreamThrottler;
-import org.eclipse.jetty.server.Server;
-import org.eclipse.jetty.server.ServerConnector;
 import org.eclipse.jetty.ee10.servlet.ServletContextHandler;
 import org.eclipse.jetty.ee10.servlet.ServletHolder;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.ServerConnector;
 import org.eclipse.jetty.util.thread.QueuedThreadPool;

 import javax.net.ssl.SSLContext;
-import jakarta.servlet.Servlet;
-import jakarta.servlet.http.HttpServletResponse;
-import jakarta.ws.rs.Path;
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;

@@ -119,11 +118,6 @@ public class ListenHTTP extends AbstractSessionFactoryProcessor {
         }
     }

-    public static final Relationship RELATIONSHIP_SUCCESS = new Relationship.Builder()
-            .name("success")
-            .description("Relationship for successfully received FlowFiles")
-            .build();
-
     public static final PropertyDescriptor BASE_PATH = new PropertyDescriptor.Builder()
             .name("Base Path")
             .description("Base path for incoming connections")

@@ -275,7 +269,7 @@ public class ListenHTTP extends AbstractSessionFactoryProcessor {
             .dependsOn(RECORD_READER)
             .build();

-    protected static final List<PropertyDescriptor> PROPERTIES = Collections.unmodifiableList(Arrays.asList(
+    protected static final List<PropertyDescriptor> PROPERTIES = List.of(
             BASE_PATH,
             PORT,
             HEALTH_CHECK_PORT,

@@ -293,11 +287,14 @@ public class ListenHTTP extends AbstractSessionFactoryProcessor {
             MAX_THREAD_POOL_SIZE,
             RECORD_READER,
             RECORD_WRITER
-    ));
+    );

-    private static final Set<Relationship> RELATIONSHIPS = Collections.unmodifiableSet(new HashSet<>(Collections.singletonList(
-            RELATIONSHIP_SUCCESS
-    )));
+    public static final Relationship RELATIONSHIP_SUCCESS = new Relationship.Builder()
+            .name("success")
+            .description("Relationship for successfully received FlowFiles")
+            .build();
+
+    private static final Set<Relationship> RELATIONSHIPS = Set.of(RELATIONSHIP_SUCCESS);

     public static final String CONTEXT_ATTRIBUTE_PROCESSOR = "processor";
     public static final String CONTEXT_ATTRIBUTE_LOGGER = "logger";

@@ -533,13 +530,9 @@ public class ListenHTTP extends AbstractSessionFactoryProcessor {
     }

     protected Set<Class<? extends Servlet>> getServerClasses() {
-        final Set<Class<? extends Servlet>> s = new HashSet<>();
         // NOTE: Servlets added below MUST have a Path annotation
         // any servlets other than ListenHTTPServlet must have a Path annotation start with /
-        s.add(ListenHTTPServlet.class);
-        s.add(ContentAcknowledgmentServlet.class);
-        s.add(HealthCheckServlet.class);
-        return s;
+        return Set.of(ListenHTTPServlet.class, ContentAcknowledgmentServlet.class, HealthCheckServlet.class);
     }

     private Set<String> findOldFlowFileIds(final ProcessContext ctx) {

@@ -41,7 +41,6 @@ import org.apache.nifi.flowfile.attributes.CoreAttributes;
 import org.apache.nifi.processor.DataUnit;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
-import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;

@@ -60,9 +59,7 @@ import java.net.SocketException;
 import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.Collection;
-import java.util.Collections;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;

@@ -196,6 +193,23 @@ public class ListenSyslog extends AbstractSyslogProcessor {
             .dependsOn(PROTOCOL, TCP_VALUE)
             .build();

+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
+            PROTOCOL,
+            PORT,
+            NETWORK_INTF_NAME,
+            SOCKET_KEEP_ALIVE,
+            SSL_CONTEXT_SERVICE,
+            CLIENT_AUTH,
+            RECV_BUFFER_SIZE,
+            MAX_MESSAGE_QUEUE_SIZE,
+            MAX_SOCKET_BUFFER_SIZE,
+            MAX_CONNECTIONS,
+            MAX_BATCH_SIZE,
+            MESSAGE_DELIMITER,
+            PARSE_MESSAGES,
+            CHARSET
+    );
+
     public static final Relationship REL_SUCCESS = new Relationship.Builder()
             .name("success")
             .description("Syslog messages that match one of the expected formats will be sent out this relationship as a FlowFile per message.")

@@ -205,51 +219,28 @@ public class ListenSyslog extends AbstractSyslogProcessor {
             .description("Syslog messages that do not match one of the expected formats will be sent out this relationship as a FlowFile per message.")
             .build();

+    private static final Set<Relationship> RELATIONSHIPS = Set.of(
+            REL_SUCCESS,
+            REL_INVALID
+    );
+
     protected static final String RECEIVED_COUNTER = "Messages Received";
     protected static final String SUCCESS_COUNTER = "FlowFiles Transferred to Success";
     private static final String DEFAULT_MIME_TYPE = "text/plain";

-    private Set<Relationship> relationships;
-    private List<PropertyDescriptor> descriptors;
-
     private volatile EventServer eventServer;
     private volatile SyslogParser parser;
     private volatile BlockingQueue<ByteArrayMessage> syslogEvents = new LinkedBlockingQueue<>();
     private volatile byte[] messageDemarcatorBytes; //it is only the array reference that is volatile - not the contents.

-    @Override
-    protected void init(final ProcessorInitializationContext context) {
-        final List<PropertyDescriptor> descriptors = new ArrayList<>();
-        descriptors.add(PROTOCOL);
-        descriptors.add(PORT);
-        descriptors.add(NETWORK_INTF_NAME);
-        descriptors.add(SOCKET_KEEP_ALIVE);
-        descriptors.add(SSL_CONTEXT_SERVICE);
-        descriptors.add(CLIENT_AUTH);
-        descriptors.add(RECV_BUFFER_SIZE);
-        descriptors.add(MAX_MESSAGE_QUEUE_SIZE);
-        descriptors.add(MAX_SOCKET_BUFFER_SIZE);
-        descriptors.add(MAX_CONNECTIONS);
-        descriptors.add(MAX_BATCH_SIZE);
-        descriptors.add(MESSAGE_DELIMITER);
-        descriptors.add(PARSE_MESSAGES);
-        descriptors.add(CHARSET);
-        this.descriptors = Collections.unmodifiableList(descriptors);
-
-        final Set<Relationship> relationships = new HashSet<>();
-        relationships.add(REL_SUCCESS);
-        relationships.add(REL_INVALID);
-        this.relationships = Collections.unmodifiableSet(relationships);
-    }
-
     @Override
     public Set<Relationship> getRelationships() {
-        return this.relationships;
+        return RELATIONSHIPS;
     }

     @Override
     public List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-        return descriptors;
+        return PROPERTIES;
     }

     @Override

@@ -41,7 +41,6 @@ import org.apache.nifi.processor.AbstractProcessor;
 import org.apache.nifi.processor.DataUnit;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
-import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;

@@ -62,9 +61,7 @@ import java.time.Duration;
 import java.time.Instant;
 import java.util.ArrayList;
 import java.util.Collection;
-import java.util.Collections;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;

@@ -144,17 +141,35 @@ public class ListenTCP extends AbstractProcessor {
             .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR)
             .build();

+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
+            ListenerProperties.NETWORK_INTF_NAME,
+            ListenerProperties.PORT,
+            ListenerProperties.RECV_BUFFER_SIZE,
+            ListenerProperties.MAX_MESSAGE_QUEUE_SIZE,
+            ListenerProperties.MAX_SOCKET_BUFFER_SIZE,
+            ListenerProperties.CHARSET,
+            ListenerProperties.WORKER_THREADS,
+            ListenerProperties.MAX_BATCH_SIZE,
+            ListenerProperties.MESSAGE_DELIMITER,
+            IDLE_CONNECTION_TIMEOUT,
+            // Deprecated
+            MAX_RECV_THREAD_POOL_SIZE,
+            POOL_RECV_BUFFERS,
+            SSL_CONTEXT_SERVICE,
+            CLIENT_AUTH
+    );
+
     public static final Relationship REL_SUCCESS = new Relationship.Builder()
             .name("success")
             .description("Messages received successfully will be sent out this relationship.")
             .build();

+    private static final Set<Relationship> RELATIONSHIPS = Set.of(REL_SUCCESS);
+
     private static final long TRACKING_LOG_INTERVAL = 60000;
     private final AtomicLong nextTrackingLog = new AtomicLong();
     private int eventsCapacity;

-    protected List<PropertyDescriptor> descriptors;
-    protected Set<Relationship> relationships;
     protected volatile int port;
     protected volatile TrackingLinkedBlockingQueue<ByteArrayMessage> events;
     protected volatile BlockingQueue<ByteArrayMessage> errorEvents;

@@ -162,31 +177,6 @@ public class ListenTCP extends AbstractProcessor {
     protected volatile byte[] messageDemarcatorBytes;
     protected volatile EventBatcher<ByteArrayMessage> eventBatcher;

-    @Override
-    protected void init(final ProcessorInitializationContext context) {
-        final List<PropertyDescriptor> descriptors = new ArrayList<>();
-        descriptors.add(ListenerProperties.NETWORK_INTF_NAME);
-        descriptors.add(ListenerProperties.PORT);
-        descriptors.add(ListenerProperties.RECV_BUFFER_SIZE);
-        descriptors.add(ListenerProperties.MAX_MESSAGE_QUEUE_SIZE);
-        descriptors.add(ListenerProperties.MAX_SOCKET_BUFFER_SIZE);
-        descriptors.add(ListenerProperties.CHARSET);
-        descriptors.add(ListenerProperties.WORKER_THREADS);
-        descriptors.add(ListenerProperties.MAX_BATCH_SIZE);
-        descriptors.add(ListenerProperties.MESSAGE_DELIMITER);
-        descriptors.add(IDLE_CONNECTION_TIMEOUT);
-        // Deprecated
-        descriptors.add(MAX_RECV_THREAD_POOL_SIZE);
-        descriptors.add(POOL_RECV_BUFFERS);
-        descriptors.add(SSL_CONTEXT_SERVICE);
-        descriptors.add(CLIENT_AUTH);
-        this.descriptors = Collections.unmodifiableList(descriptors);
-
-        final Set<Relationship> relationships = new HashSet<>();
-        relationships.add(REL_SUCCESS);
-        this.relationships = Collections.unmodifiableSet(relationships);
-    }
-
     @OnScheduled
     public void onScheduled(ProcessContext context) throws IOException {
         int workerThreads = context.getProperty(ListenerProperties.WORKER_THREADS).asInteger();

@@ -245,14 +235,14 @@ public class ListenTCP extends AbstractProcessor {
             FlowFile flowFile = entry.getValue().getFlowFile();
             final List<ByteArrayMessage> events = entry.getValue().getEvents();

-            if (flowFile.getSize() == 0L || events.size() == 0) {
+            if (flowFile.getSize() == 0L || events.isEmpty()) {
                 session.remove(flowFile);
                 getLogger().debug("No data written to FlowFile from batch {}; removing FlowFile", entry.getKey());
                 continue;
             }

             final Map<String, String> attributes = getAttributes(entry.getValue());
-            addClientCertificateAttributes(attributes, events.get(0));
+            addClientCertificateAttributes(attributes, events.getFirst());
             flowFile = session.putAllAttributes(flowFile, attributes);

             getLogger().debug("Transferring {} to success", flowFile);

@@ -290,7 +280,7 @@ public class ListenTCP extends AbstractProcessor {

     protected Map<String, String> getAttributes(final FlowFileEventBatch<ByteArrayMessage> batch) {
         final List<ByteArrayMessage> events = batch.getEvents();
-        final String sender = events.get(0).getSender();
+        final String sender = events.getFirst().getSender();
         final Map<String, String> attributes = new HashMap<>(3);
         attributes.put("tcp.sender", sender);
         attributes.put("tcp.port", String.valueOf(port));

@@ -299,19 +289,19 @@ public class ListenTCP extends AbstractProcessor {

     protected String getTransitUri(final FlowFileEventBatch<ByteArrayMessage> batch) {
         final List<ByteArrayMessage> events = batch.getEvents();
-        final String sender = events.get(0).getSender();
+        final String sender = events.getFirst().getSender();
         final String senderHost = sender.startsWith("/") && sender.length() > 1 ? sender.substring(1) : sender;
         return String.format("tcp://%s:%d", senderHost, port);
     }

     @Override
     public final Set<Relationship> getRelationships() {
-        return this.relationships;
+        return this.RELATIONSHIPS;
     }

     @Override
     public List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-        return descriptors;
+        return PROPERTIES;
     }

     private String getMessageDemarcator(final ProcessContext context) {

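The get(0)-to-getFirst() substitutions above come from Java 21's SequencedCollection interface, which adds getFirst() and getLast() to List. For non-empty lists the two are equivalent; on an empty list getFirst() throws NoSuchElementException where get(0) would throw IndexOutOfBoundsException. The caller above filters out empty batches before reaching these calls, so nothing observable changes:

    import java.util.List;

    class FirstElement {
        static String firstSender(final List<String> senders) {
            return senders.getFirst(); // Java 21; same as senders.get(0) for non-empty lists
        }
    }
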
@@ -45,7 +45,6 @@ import java.io.IOException;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;

@@ -82,15 +81,17 @@ public class ListenUDP extends AbstractListenEventBatchingProcessor<StandardEven
             .expressionLanguageSupported(ExpressionLanguageScope.ENVIRONMENT)
             .build();

+    private static final List<PropertyDescriptor> ADDITIONAL_PROPERTIES = List.of(
+            SENDING_HOST,
+            SENDING_HOST_PORT
+    );
+
     public static final String UDP_PORT_ATTR = "udp.port";
     public static final String UDP_SENDER_ATTR = "udp.sender";

     @Override
     protected List<PropertyDescriptor> getAdditionalProperties() {
-        return Arrays.asList(
-                SENDING_HOST,
-                SENDING_HOST_PORT
-        );
+        return ADDITIONAL_PROPERTIES;
     }

     @Override

@@ -132,7 +133,7 @@ public class ListenUDP extends AbstractListenEventBatchingProcessor<StandardEven
     @Override
     protected Map<String, String> getAttributes(final FlowFileEventBatch batch) {
-        final String sender = batch.getEvents().get(0).getSender();
+        final String sender = batch.getEvents().getFirst().getSender();
         final Map<String, String> attributes = new HashMap<>(3);
         attributes.put(UDP_SENDER_ATTR, sender);
         attributes.put(UDP_PORT_ATTR, String.valueOf(port));

@@ -141,11 +142,10 @@ public class ListenUDP extends AbstractListenEventBatchingProcessor<StandardEven
     @Override
     protected String getTransitUri(FlowFileEventBatch batch) {
-        final String sender = batch.getEvents().get(0).getSender();
+        final String sender = batch.getEvents().getFirst().getSender();
         final String senderHost = sender.startsWith("/") && sender.length() > 1 ? sender.substring(1) : sender;
-        final String transitUri = new StringBuilder().append("udp").append("://").append(senderHost).append(":")
+        return new StringBuilder().append("udp").append("://").append(senderHost).append(":")
                 .append(port).toString();
-        return transitUri;
     }

     public static class HostValidator implements Validator {

@@ -61,7 +61,6 @@ import java.io.OutputStream;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;

@@ -142,12 +141,23 @@ public class ListenUDPRecord extends AbstractListenEventProcessor<StandardEvent>
             .required(true)
             .build();

+    private static final List<PropertyDescriptor> ADDITIONAL_PROPERTIES = List.of(
+            POLL_TIMEOUT,
+            BATCH_SIZE,
+            RECORD_READER,
+            RECORD_WRITER,
+            SENDING_HOST,
+            SENDING_HOST_PORT
+    );
+
     public static final Relationship REL_PARSE_FAILURE = new Relationship.Builder()
             .name("parse.failure")
             .description("If a datagram cannot be parsed using the configured Record Reader, the contents of the "
                     + "message will be routed to this Relationship as its own individual FlowFile.")
             .build();

+    private static final List<Relationship> ADDITIONAL_RELATIONSHIPS = List.of(REL_PARSE_FAILURE);
+
     public static final String UDP_PORT_ATTR = "udp.port";
     public static final String UDP_SENDER_ATTR = "udp.sender";
     public static final String RECORD_COUNT_ATTR = "record.count";

@@ -156,19 +166,12 @@ public class ListenUDPRecord extends AbstractListenEventProcessor<StandardEvent>

     @Override
     protected List<PropertyDescriptor> getAdditionalProperties() {
-        return Arrays.asList(
-                POLL_TIMEOUT,
-                BATCH_SIZE,
-                RECORD_READER,
-                RECORD_WRITER,
-                SENDING_HOST,
-                SENDING_HOST_PORT
-        );
+        return ADDITIONAL_PROPERTIES;
     }

     @Override
     protected List<Relationship> getAdditionalRelationships() {
-        return Arrays.asList(REL_PARSE_FAILURE);
+        return ADDITIONAL_RELATIONSHIPS;
     }

     @Override

@@ -255,7 +258,7 @@ public class ListenUDPRecord extends AbstractListenEventProcessor<StandardEvent>
                 continue;
             }

-            if (records.size() == 0) {
+            if (records.isEmpty()) {
                 handleParseFailure(event, session, null);
                 continue;
             }

@@ -396,9 +399,8 @@ public class ListenUDPRecord extends AbstractListenEventProcessor<StandardEvent>

     private String getTransitUri(final String sender) {
         final String senderHost = sender.startsWith("/") && sender.length() > 1 ? sender.substring(1) : sender;
-        final String transitUri = new StringBuilder().append("udp").append("://").append(senderHost).append(":")
+        return new StringBuilder().append("udp").append("://").append(senderHost).append(":")
                 .append(port).toString();
-        return transitUri;
     }

     /**

@ -33,7 +33,6 @@ import org.apache.nifi.logging.ComponentLog;
|
|||
import org.apache.nifi.processor.AbstractProcessor;
|
||||
import org.apache.nifi.processor.ProcessContext;
|
||||
import org.apache.nifi.processor.ProcessSession;
|
||||
import org.apache.nifi.processor.ProcessorInitializationContext;
|
||||
import org.apache.nifi.processor.Relationship;
|
||||
import org.apache.nifi.processor.exception.ProcessException;
|
||||
import org.apache.nifi.processor.io.InputStreamCallback;
|
||||
|
@ -43,9 +42,7 @@ import org.eclipse.jetty.util.StringUtil;
|
|||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.nio.charset.Charset;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
|
@ -146,6 +143,19 @@ public class LogAttribute extends AbstractProcessor {
|
|||
.required(true)
|
||||
.build();
|
||||
|
||||
private static final List<PropertyDescriptor> PROPERTIES = List.of(
|
||||
LOG_LEVEL,
|
||||
LOG_PAYLOAD,
|
||||
ATTRIBUTES_TO_LOG_CSV,
|
||||
ATTRIBUTES_TO_LOG_REGEX,
|
||||
ATTRIBUTES_TO_IGNORE_CSV,
|
||||
ATTRIBUTES_TO_IGNORE_REGEX,
|
||||
LOG_FLOWFILE_PROPERTIES,
|
||||
OUTPUT_FORMAT,
|
||||
LOG_PREFIX,
|
||||
CHARSET
|
||||
);
|
||||
|
||||
public static final String FIFTY_DASHES = "--------------------------------------------------";
|
||||
|
||||
public static enum DebugLevels {
|
||||
|
@ -153,43 +163,22 @@ public class LogAttribute extends AbstractProcessor {
|
|||
}
|
||||
|
||||
public static final long ONE_MB = 1024 * 1024;
|
||||
private Set<Relationship> relationships;
|
||||
private List<PropertyDescriptor> supportedDescriptors;
|
||||
|
||||
public static final Relationship REL_SUCCESS = new Relationship.Builder()
|
||||
.name("success")
|
||||
.description("All FlowFiles are routed to this relationship")
|
||||
.build();
|
||||
|
||||
@Override
|
||||
protected void init(final ProcessorInitializationContext context) {
|
||||
final Set<Relationship> procRels = new HashSet<>();
|
||||
procRels.add(REL_SUCCESS);
|
||||
relationships = Collections.unmodifiableSet(procRels);
|
||||
|
||||
// descriptors
|
||||
final List<PropertyDescriptor> supDescriptors = new ArrayList<>();
|
||||
supDescriptors.add(LOG_LEVEL);
|
||||
supDescriptors.add(LOG_PAYLOAD);
|
||||
supDescriptors.add(ATTRIBUTES_TO_LOG_CSV);
|
||||
supDescriptors.add(ATTRIBUTES_TO_LOG_REGEX);
|
||||
supDescriptors.add(ATTRIBUTES_TO_IGNORE_CSV);
|
||||
supDescriptors.add(ATTRIBUTES_TO_IGNORE_REGEX);
|
||||
supDescriptors.add(LOG_FLOWFILE_PROPERTIES);
|
||||
supDescriptors.add(OUTPUT_FORMAT);
|
||||
supDescriptors.add(LOG_PREFIX);
|
||||
supDescriptors.add(CHARSET);
|
||||
supportedDescriptors = Collections.unmodifiableList(supDescriptors);
|
||||
}
|
||||
private static final Set<Relationship> RELATIONSHIPS = Set.of(REL_SUCCESS);
|
||||
|
||||
@Override
|
||||
public Set<Relationship> getRelationships() {
|
||||
return relationships;
|
||||
return RELATIONSHIPS;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
|
||||
return supportedDescriptors;
|
||||
return PROPERTIES;
|
||||
}
|
||||
|
||||
protected String processFlowFile(final ComponentLog logger, final DebugLevels logLevel, final FlowFile flowFile, final ProcessSession session, final ProcessContext context) {
|
||||
|
@ -331,24 +320,13 @@ public class LogAttribute extends AbstractProcessor {
|
|||
}
|
||||
|
||||
final ComponentLog LOG = getLogger();
|
||||
boolean isLogLevelEnabled = false;
|
||||
switch (logLevel) {
|
||||
case trace:
|
||||
isLogLevelEnabled = LOG.isTraceEnabled();
|
||||
break;
|
||||
case debug:
|
||||
isLogLevelEnabled = LOG.isDebugEnabled();
|
||||
break;
|
||||
case info:
|
||||
isLogLevelEnabled = LOG.isInfoEnabled();
|
||||
break;
|
||||
case warn:
|
||||
isLogLevelEnabled = LOG.isWarnEnabled();
|
||||
break;
|
||||
case error:
|
||||
isLogLevelEnabled = LOG.isErrorEnabled();
|
||||
break;
|
||||
}
|
||||
boolean isLogLevelEnabled = switch (logLevel) {
|
||||
case trace -> LOG.isTraceEnabled();
|
||||
case debug -> LOG.isDebugEnabled();
|
||||
case info -> LOG.isInfoEnabled();
|
||||
case warn -> LOG.isWarnEnabled();
|
||||
case error -> LOG.isErrorEnabled();
|
||||
};
|
||||
|
||||
if (!isLogLevelEnabled) {
|
||||
transferChunk(session);
|
||||
|
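The replacement above turns a fall-through switch statement that mutated a local flag into a switch expression (Java 14+) that yields the value directly. A self-contained version of the same idiom, reusing the enum constant names visible in the hunk but with hardcoded booleans standing in for the ComponentLog checks:

    class SwitchExpressionDemo {
        enum DebugLevels { trace, debug, info, warn, error }

        static boolean isEnabled(DebugLevels level) {
            // Arrow labels cannot fall through, and the compiler enforces
            // exhaustiveness over the enum, so no default branch is needed.
            return switch (level) {
                case trace -> false;
                case debug -> false;
                case info  -> true;
                case warn  -> true;
                case error -> true;
            };
        }

        public static void main(String[] args) {
            System.out.println(isEnabled(DebugLevels.info));  // true
            System.out.println(isEnabled(DebugLevels.trace)); // false
        }
    }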
@ -367,7 +345,7 @@ public class LogAttribute extends AbstractProcessor {
|
|||
protected static class FlowFilePayloadCallback implements InputStreamCallback {
|
||||
|
||||
private String contents = "";
|
||||
private Charset charset;
|
||||
private final Charset charset;
|
||||
|
||||
public FlowFilePayloadCallback(Charset charset) {
|
||||
this.charset = charset;
|
||||
|
@ -382,6 +360,4 @@ public class LogAttribute extends AbstractProcessor {
|
|||
return contents;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
|
@ -30,16 +30,12 @@ import org.apache.nifi.logging.ComponentLog;
|
|||
import org.apache.nifi.processor.AbstractProcessor;
|
||||
import org.apache.nifi.processor.ProcessContext;
|
||||
import org.apache.nifi.processor.ProcessSession;
|
||||
import org.apache.nifi.processor.ProcessorInitializationContext;
|
||||
import org.apache.nifi.processor.Relationship;
|
||||
import org.apache.nifi.processor.exception.ProcessException;
|
||||
import org.apache.nifi.processor.util.StandardValidators;
|
||||
import org.eclipse.jetty.util.StringUtil;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
|
@ -79,43 +75,31 @@ public class LogMessage extends AbstractProcessor {
|
|||
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
|
||||
.build();
|
||||
|
||||
private static final List<PropertyDescriptor> PROPERTIES = List.of(
|
||||
LOG_LEVEL,
|
||||
LOG_PREFIX,
|
||||
LOG_MESSAGE
|
||||
);
|
||||
|
||||
public static final Relationship REL_SUCCESS = new Relationship.Builder()
|
||||
.name("success")
|
||||
.description("All FlowFiles are routed to this relationship")
|
||||
.build();
|
||||
|
||||
private static final int CHUNK_SIZE = 50;
|
||||
private static final Set<Relationship> RELATIONSHIPS = Set.of(REL_SUCCESS);
|
||||
|
||||
enum MessageLogLevel {
|
||||
|
||||
trace, debug, info, warn, error
|
||||
}
|
||||
|
||||
private Set<Relationship> relationships;
|
||||
private List<PropertyDescriptor> supportedDescriptors;
|
||||
|
||||
@Override
|
||||
protected void init(final ProcessorInitializationContext context) {
|
||||
final Set<Relationship> procRels = new HashSet<>();
|
||||
procRels.add(REL_SUCCESS);
|
||||
relationships = Collections.unmodifiableSet(procRels);
|
||||
|
||||
// descriptors
|
||||
final List<PropertyDescriptor> supDescriptors = new ArrayList<>();
|
||||
supDescriptors.add(LOG_LEVEL);
|
||||
supDescriptors.add(LOG_PREFIX);
|
||||
supDescriptors.add(LOG_MESSAGE);
|
||||
supportedDescriptors = Collections.unmodifiableList(supDescriptors);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Set<Relationship> getRelationships() {
|
||||
return relationships;
|
||||
return RELATIONSHIPS;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
|
||||
return supportedDescriptors;
|
||||
return PROPERTIES;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
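Both hunks in this file collapse the init(...) bookkeeping — a mutable HashSet wrapped by Collections.unmodifiableSet — into Set.of(...) constants. One behavioral difference worth knowing: the Set.of factories reject duplicate and null elements eagerly, turning a silent mistake into a construction-time failure. A small demonstration with strings standing in for Relationship objects:

    import java.util.Set;

    class SetOfDemo {
        public static void main(String[] args) {
            Set<String> rels = Set.of("success");
            System.out.println(rels.contains("success"));   // true
            try {
                Set.of("success", "success");               // duplicate element
            } catch (IllegalArgumentException e) {
                System.out.println("duplicates rejected at creation");
            }
            try {
                Set.of("success", null);                    // null element
            } catch (NullPointerException e) {
                System.out.println("nulls rejected at creation");
            }
        }
    }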
@ -136,24 +120,13 @@ public class LogMessage extends AbstractProcessor {
|
|||
}
|
||||
|
||||
final ComponentLog logger = getLogger();
|
||||
boolean isLogLevelEnabled = false;
|
||||
switch (logLevel) {
|
||||
case trace:
|
||||
isLogLevelEnabled = logger.isTraceEnabled();
|
||||
break;
|
||||
case debug:
|
||||
isLogLevelEnabled = logger.isDebugEnabled();
|
||||
break;
|
||||
case info:
|
||||
isLogLevelEnabled = logger.isInfoEnabled();
|
||||
break;
|
||||
case warn:
|
||||
isLogLevelEnabled = logger.isWarnEnabled();
|
||||
break;
|
||||
case error:
|
||||
isLogLevelEnabled = logger.isErrorEnabled();
|
||||
break;
|
||||
}
|
||||
boolean isLogLevelEnabled = switch (logLevel) {
|
||||
case trace -> logger.isTraceEnabled();
|
||||
case debug -> logger.isDebugEnabled();
|
||||
case info -> logger.isInfoEnabled();
|
||||
case warn -> logger.isWarnEnabled();
|
||||
case error -> logger.isErrorEnabled();
|
||||
};
|
||||
|
||||
if (isLogLevelEnabled) {
|
||||
processFlowFile(logger, logLevel, flowFile, context);
|
||||
|
|
|
@ -16,20 +16,7 @@
|
|||
*/
|
||||
package org.apache.nifi.processors.standard;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
|
||||
import org.apache.nifi.annotation.behavior.DynamicProperty;
|
||||
import org.apache.nifi.annotation.behavior.InputRequirement;
|
||||
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
|
||||
|
@ -52,11 +39,21 @@ import org.apache.nifi.lookup.StringLookupService;
|
|||
import org.apache.nifi.processor.AbstractProcessor;
|
||||
import org.apache.nifi.processor.ProcessContext;
|
||||
import org.apache.nifi.processor.ProcessSession;
|
||||
import org.apache.nifi.processor.ProcessorInitializationContext;
|
||||
import org.apache.nifi.processor.Relationship;
|
||||
import org.apache.nifi.processor.exception.ProcessException;
|
||||
import org.apache.nifi.processor.util.StandardValidators;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
@SideEffectFree
|
||||
@SupportsBatching
|
||||
@InputRequirement(Requirement.INPUT_REQUIRED)
|
||||
|
@ -88,6 +85,11 @@ public class LookupAttribute extends AbstractProcessor {
|
|||
.required(true)
|
||||
.build();
|
||||
|
||||
private static final List<PropertyDescriptor> PROPERTIES = List.of(
|
||||
LOOKUP_SERVICE,
|
||||
INCLUDE_EMPTY_VALUES
|
||||
);
|
||||
|
||||
public static final Relationship REL_MATCHED = new Relationship.Builder()
|
||||
.description("FlowFiles with matching lookups are routed to this relationship")
|
||||
.name("matched")
|
||||
|
@ -103,9 +105,11 @@ public class LookupAttribute extends AbstractProcessor {
|
|||
.name("failure")
|
||||
.build();
|
||||
|
||||
private List<PropertyDescriptor> descriptors;
|
||||
|
||||
private Set<Relationship> relationships;
|
||||
private static final Set<Relationship> RELATIONSHIPS = Set.of(
|
||||
REL_MATCHED,
|
||||
REL_UNMATCHED,
|
||||
REL_FAILURE
|
||||
);
|
||||
|
||||
private Map<PropertyDescriptor, PropertyValue> dynamicProperties;
|
||||
|
||||
|
@ -114,10 +118,10 @@ public class LookupAttribute extends AbstractProcessor {
|
|||
final List<ValidationResult> errors = new ArrayList<>(super.customValidate(validationContext));
|
||||
|
||||
final Set<PropertyDescriptor> dynamicProperties = validationContext.getProperties().keySet().stream()
|
||||
.filter(prop -> prop.isDynamic())
|
||||
.filter(PropertyDescriptor::isDynamic)
|
||||
.collect(Collectors.toSet());
|
||||
|
||||
if (dynamicProperties == null || dynamicProperties.size() < 1) {
|
||||
if (dynamicProperties.isEmpty()) {
|
||||
errors.add(new ValidationResult.Builder()
|
||||
.subject("User-Defined Properties")
|
||||
.valid(false)
|
||||
|
@ -139,7 +143,7 @@ public class LookupAttribute extends AbstractProcessor {
|
|||
|
||||
@Override
|
||||
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
|
||||
return descriptors;
|
||||
return PROPERTIES;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -156,21 +160,7 @@ public class LookupAttribute extends AbstractProcessor {
|
|||
|
||||
@Override
|
||||
public Set<Relationship> getRelationships() {
|
||||
return relationships;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void init(final ProcessorInitializationContext context) {
|
||||
final List<PropertyDescriptor> descriptors = new ArrayList<PropertyDescriptor>();
|
||||
descriptors.add(LOOKUP_SERVICE);
|
||||
descriptors.add(INCLUDE_EMPTY_VALUES);
|
||||
this.descriptors = Collections.unmodifiableList(descriptors);
|
||||
|
||||
final Set<Relationship> relationships = new HashSet<Relationship>();
|
||||
relationships.add(REL_MATCHED);
|
||||
relationships.add(REL_UNMATCHED);
|
||||
relationships.add(REL_FAILURE);
|
||||
this.relationships = Collections.unmodifiableSet(relationships);
|
||||
return RELATIONSHIPS;
|
||||
}
|
||||
|
||||
@OnScheduled
|
||||
|
@ -184,7 +174,7 @@ public class LookupAttribute extends AbstractProcessor {
|
|||
dynamicProperties.put(descriptor, value);
|
||||
}
|
||||
}
|
||||
this.dynamicProperties = Collections.unmodifiableMap(dynamicProperties);
|
||||
this.dynamicProperties = Map.copyOf(dynamicProperties);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
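Map.copyOf above replaces Collections.unmodifiableMap. The older call produced a read-only view: later writes through the original map remained visible to holders of the view. Map.copyOf takes a snapshot, which is usually what a processor caching its dynamic properties at schedule time wants. A sketch:

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    class MapCopyOfDemo {
        public static void main(String[] args) {
            Map<String, String> source = new HashMap<>();
            source.put("attr", "path");

            Map<String, String> view = Collections.unmodifiableMap(source);
            Map<String, String> copy = Map.copyOf(source);

            source.put("attr2", "path2");       // mutate the backing map

            System.out.println(view.size());    // 2 -- the view tracks the source
            System.out.println(copy.size());    // 1 -- the copy is a snapshot
        }
    }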
@ -236,7 +226,6 @@ public class LookupAttribute extends AbstractProcessor {
|
|||
logger.error(e.getMessage(), e);
|
||||
session.transfer(flowFile, REL_FAILURE);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private boolean putAttribute(final String attributeName, final Optional<String> attributeValue, final Map<String, String> attributes, final boolean includeEmptyValues, final ComponentLog logger) {
|
||||
|
@ -250,5 +239,4 @@ public class LookupAttribute extends AbstractProcessor {
|
|||
}
|
||||
return matched;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -222,6 +222,18 @@ public class LookupRecord extends AbstractProcessor {
|
|||
.required(true)
|
||||
.build();
|
||||
|
||||
private static final List<PropertyDescriptor> PROPERTIES = List.of(
|
||||
RECORD_READER,
|
||||
RECORD_WRITER,
|
||||
LOOKUP_SERVICE,
|
||||
ROOT_RECORD_PATH,
|
||||
ROUTING_STRATEGY,
|
||||
RESULT_CONTENTS,
|
||||
REPLACEMENT_STRATEGY,
|
||||
RESULT_RECORD_PATH,
|
||||
CACHE_SIZE
|
||||
);
|
||||
|
||||
static final Relationship REL_MATCHED = new Relationship.Builder()
|
||||
.name("matched")
|
||||
.description("All records for which the lookup returns a value will be routed to this relationship")
|
||||
|
@ -239,11 +251,14 @@ public class LookupRecord extends AbstractProcessor {
|
|||
.description("If a FlowFile cannot be enriched, the unchanged FlowFile will be routed to this relationship")
|
||||
.build();
|
||||
|
||||
private static final Set<Relationship> MATCHED_COLLECTION = Collections.singleton(REL_MATCHED);
|
||||
private static final Set<Relationship> UNMATCHED_COLLECTION = Collections.singleton(REL_UNMATCHED);
|
||||
private static final Set<Relationship> SUCCESS_COLLECTION = Collections.singleton(REL_SUCCESS);
|
||||
private static final Set<Relationship> MATCHED_COLLECTION = Set.of(REL_MATCHED);
|
||||
private static final Set<Relationship> UNMATCHED_COLLECTION = Set.of(REL_UNMATCHED);
|
||||
private static final Set<Relationship> SUCCESS_COLLECTION = Set.of(REL_SUCCESS);
|
||||
|
||||
private volatile Set<Relationship> relationships = new HashSet<>(Arrays.asList(REL_SUCCESS, REL_FAILURE));
|
||||
private volatile Set<Relationship> relationships = Set.of(
|
||||
REL_SUCCESS,
|
||||
REL_FAILURE
|
||||
);
|
||||
private volatile boolean routeToMatchedUnmatched = false;
|
||||
|
||||
@OnScheduled
|
||||
|
@ -258,17 +273,7 @@ public class LookupRecord extends AbstractProcessor {
|
|||
|
||||
@Override
|
||||
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
|
||||
final List<PropertyDescriptor> properties = new ArrayList<>();
|
||||
properties.add(RECORD_READER);
|
||||
properties.add(RECORD_WRITER);
|
||||
properties.add(LOOKUP_SERVICE);
|
||||
properties.add(ROOT_RECORD_PATH);
|
||||
properties.add(ROUTING_STRATEGY);
|
||||
properties.add(RESULT_CONTENTS);
|
||||
properties.add(REPLACEMENT_STRATEGY);
|
||||
properties.add(RESULT_RECORD_PATH);
|
||||
properties.add(CACHE_SIZE);
|
||||
return properties;
|
||||
return PROPERTIES;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -292,7 +297,7 @@ public class LookupRecord extends AbstractProcessor {
|
|||
.collect(Collectors.toSet());
|
||||
|
||||
if (dynamicPropNames.isEmpty()) {
|
||||
return Collections.singleton(new ValidationResult.Builder()
|
||||
return Set.of(new ValidationResult.Builder()
|
||||
.subject("User-Defined Properties")
|
||||
.valid(false)
|
||||
.explanation("At least one user-defined property must be specified.")
|
||||
|
@ -304,7 +309,7 @@ public class LookupRecord extends AbstractProcessor {
|
|||
if (validationContext.getProperty(REPLACEMENT_STRATEGY).getValue().equals(REPLACE_EXISTING_VALUES.getValue())) {
|
||||
// it must be a single key lookup service
|
||||
if (requiredKeys.size() != 1) {
|
||||
return Collections.singleton(new ValidationResult.Builder()
|
||||
return Set.of(new ValidationResult.Builder()
|
||||
.subject(LOOKUP_SERVICE.getDisplayName())
|
||||
.valid(false)
|
||||
.explanation("When using \"" + REPLACE_EXISTING_VALUES.getDisplayName() + "\" as Record Update Strategy, "
|
||||
|
@ -340,18 +345,11 @@ public class LookupRecord extends AbstractProcessor {
|
|||
public void onPropertyModified(final PropertyDescriptor descriptor, final String oldValue, final String newValue) {
|
||||
if (ROUTING_STRATEGY.equals(descriptor)) {
|
||||
if (ROUTE_TO_MATCHED_UNMATCHED.getValue().equalsIgnoreCase(newValue)) {
|
||||
final Set<Relationship> matchedUnmatchedRels = new HashSet<>();
|
||||
matchedUnmatchedRels.add(REL_MATCHED);
|
||||
matchedUnmatchedRels.add(REL_UNMATCHED);
|
||||
matchedUnmatchedRels.add(REL_FAILURE);
|
||||
this.relationships = matchedUnmatchedRels;
|
||||
this.relationships = Set.of(REL_MATCHED, REL_UNMATCHED, REL_FAILURE);
|
||||
|
||||
this.routeToMatchedUnmatched = true;
|
||||
} else {
|
||||
final Set<Relationship> successRels = new HashSet<>();
|
||||
successRels.add(REL_SUCCESS);
|
||||
successRels.add(REL_FAILURE);
|
||||
this.relationships = successRels;
|
||||
this.relationships = Set.of(REL_SUCCESS, REL_FAILURE);
|
||||
|
||||
this.routeToMatchedUnmatched = false;
|
||||
}
|
||||
|
@ -674,9 +672,7 @@ public class LookupRecord extends AbstractProcessor {
|
|||
final RecordPathResult resultPathResult = resultPath.evaluate(record);
|
||||
|
||||
final String resultContentsValue = context.getProperty(RESULT_CONTENTS).getValue();
|
||||
if (RESULT_RECORD_FIELDS.getValue().equals(resultContentsValue) && lookupValue instanceof Record) {
|
||||
final Record lookupRecord = (Record) lookupValue;
|
||||
|
||||
if (RESULT_RECORD_FIELDS.getValue().equals(resultContentsValue) && lookupValue instanceof Record lookupRecord) {
|
||||
// User wants to add all fields of the resultant Record to the specified Record Path.
|
||||
// If the destination Record Path returns to us a Record, then we will add all field values of
|
||||
// the Lookup Record to the destination Record. However, if the destination Record Path returns
|
||||
|
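The condition above uses pattern matching for instanceof (Java 16+): the binding variable lookupRecord is declared and assigned inside the test itself, removing the separate cast line. The same shape in isolation, with a plain String standing in for the Record type:

    class InstanceofPatternDemo {
        static int lengthIfString(Object value) {
            // The binding 'text' is only in scope where the test is true.
            if (value instanceof String text) {
                return text.length();
            }
            return -1;
        }

        public static void main(String[] args) {
            System.out.println(lengthIfString("lookup"));  // 6
            System.out.println(lengthIfString(42));        // -1
        }
    }

The equals(...) hunks later in this commit use the negated form, !(obj instanceof ValueWrapper other); there the binding flows into the code after the early return, which is why the explicit cast line could be deleted.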
|
|
@ -280,7 +280,6 @@ public class MergeContent extends BinFiles {
|
|||
MERGE_FORMAT_AVRO_VALUE,
|
||||
"The Avro contents of all FlowFiles will be concatenated together into a single FlowFile");
|
||||
|
||||
|
||||
public static final String TAR_PERMISSIONS_ATTRIBUTE = "tar.permissions";
|
||||
public static final String MERGE_COUNT_ATTRIBUTE = "merge.count";
|
||||
public static final String MERGE_BIN_AGE_ATTRIBUTE = "merge.bin.age";
|
||||
|
@ -398,46 +397,50 @@ public class MergeContent extends BinFiles {
|
|||
.dependsOn(MERGE_FORMAT, MERGE_FORMAT_TAR)
|
||||
.build();
|
||||
|
||||
private static final List<PropertyDescriptor> PROPERTIES = List.of(
|
||||
MERGE_STRATEGY,
|
||||
MERGE_FORMAT,
|
||||
AttributeStrategyUtil.ATTRIBUTE_STRATEGY,
|
||||
CORRELATION_ATTRIBUTE_NAME,
|
||||
METADATA_STRATEGY,
|
||||
addBinPackingDependency(MIN_ENTRIES),
|
||||
addBinPackingDependency(MAX_ENTRIES),
|
||||
addBinPackingDependency(MIN_SIZE),
|
||||
addBinPackingDependency(MAX_SIZE),
|
||||
MAX_BIN_AGE,
|
||||
MAX_BIN_COUNT,
|
||||
DELIMITER_STRATEGY,
|
||||
HEADER,
|
||||
FOOTER,
|
||||
DEMARCATOR,
|
||||
COMPRESSION_LEVEL,
|
||||
KEEP_PATH,
|
||||
TAR_MODIFIED_TIME
|
||||
);
|
||||
|
||||
public static final Relationship REL_MERGED = new Relationship.Builder().name("merged").description("The FlowFile containing the merged content").build();
|
||||
|
||||
private static final Set<Relationship> RELATIONSHIPS = Set.of(
|
||||
REL_ORIGINAL,
|
||||
REL_FAILURE,
|
||||
REL_MERGED
|
||||
);
|
||||
|
||||
public static final Pattern NUMBER_PATTERN = Pattern.compile("\\d+");
|
||||
|
||||
// Convenience method to make creation of property descriptors cleaner
|
||||
private static PropertyDescriptor addBinPackingDependency(final PropertyDescriptor original) {
|
||||
return new PropertyDescriptor.Builder().fromPropertyDescriptor(original).dependsOn(MERGE_STRATEGY, MERGE_STRATEGY_BIN_PACK).build();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Set<Relationship> getRelationships() {
|
||||
final Set<Relationship> relationships = new HashSet<>();
|
||||
relationships.add(REL_ORIGINAL);
|
||||
relationships.add(REL_FAILURE);
|
||||
relationships.add(REL_MERGED);
|
||||
return relationships;
|
||||
return RELATIONSHIPS;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
|
||||
final List<PropertyDescriptor> descriptors = new ArrayList<>();
|
||||
descriptors.add(MERGE_STRATEGY);
|
||||
descriptors.add(MERGE_FORMAT);
|
||||
descriptors.add(AttributeStrategyUtil.ATTRIBUTE_STRATEGY);
|
||||
descriptors.add(CORRELATION_ATTRIBUTE_NAME);
|
||||
descriptors.add(METADATA_STRATEGY);
|
||||
descriptors.add(addBinPackingDependency(MIN_ENTRIES));
|
||||
descriptors.add(addBinPackingDependency(MAX_ENTRIES));
|
||||
descriptors.add(addBinPackingDependency(MIN_SIZE));
|
||||
descriptors.add(addBinPackingDependency(MAX_SIZE));
|
||||
descriptors.add(MAX_BIN_AGE);
|
||||
descriptors.add(MAX_BIN_COUNT);
|
||||
descriptors.add(DELIMITER_STRATEGY);
|
||||
descriptors.add(HEADER);
|
||||
descriptors.add(FOOTER);
|
||||
descriptors.add(DEMARCATOR);
|
||||
descriptors.add(COMPRESSION_LEVEL);
|
||||
descriptors.add(KEEP_PATH);
|
||||
descriptors.add(TAR_MODIFIED_TIME);
|
||||
return descriptors;
|
||||
}
|
||||
|
||||
// Convenience method to make creation of property descriptors cleaner
|
||||
private PropertyDescriptor addBinPackingDependency(final PropertyDescriptor original) {
|
||||
return new PropertyDescriptor.Builder().fromPropertyDescriptor(original).dependsOn(MERGE_STRATEGY, MERGE_STRATEGY_BIN_PACK).build();
|
||||
return PROPERTIES;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -501,32 +504,19 @@ public class MergeContent extends BinFiles {
|
|||
protected BinProcessingResult processBin(final Bin bin, final ProcessContext context) throws ProcessException {
|
||||
final BinProcessingResult binProcessingResult = new BinProcessingResult(true);
|
||||
final String mergeFormat = context.getProperty(MERGE_FORMAT).getValue();
|
||||
MergeBin merger;
|
||||
switch (mergeFormat) {
|
||||
case MERGE_FORMAT_TAR_VALUE:
|
||||
merger = new TarMerge();
|
||||
break;
|
||||
case MERGE_FORMAT_ZIP_VALUE:
|
||||
merger = new ZipMerge(context.getProperty(COMPRESSION_LEVEL).asInteger());
|
||||
break;
|
||||
case MERGE_FORMAT_FLOWFILE_STREAM_V3_VALUE:
|
||||
merger = new FlowFileStreamMerger(new FlowFilePackagerV3(), StandardFlowFileMediaType.VERSION_3.getMediaType());
|
||||
break;
|
||||
case MERGE_FORMAT_FLOWFILE_STREAM_V2_VALUE:
|
||||
merger = new FlowFileStreamMerger(new FlowFilePackagerV2(), StandardFlowFileMediaType.VERSION_2.getMediaType());
|
||||
break;
|
||||
case MERGE_FORMAT_FLOWFILE_TAR_V1_VALUE:
|
||||
merger = new FlowFileStreamMerger(new FlowFilePackagerV1(), StandardFlowFileMediaType.VERSION_1.getMediaType());
|
||||
break;
|
||||
case MERGE_FORMAT_CONCAT_VALUE:
|
||||
merger = new BinaryConcatenationMerge();
|
||||
break;
|
||||
case MERGE_FORMAT_AVRO_VALUE:
|
||||
merger = new AvroMerge();
|
||||
break;
|
||||
default:
|
||||
throw new AssertionError();
|
||||
}
|
||||
MergeBin merger = switch (mergeFormat) {
|
||||
case MERGE_FORMAT_TAR_VALUE -> new TarMerge();
|
||||
case MERGE_FORMAT_ZIP_VALUE -> new ZipMerge(context.getProperty(COMPRESSION_LEVEL).asInteger());
|
||||
case MERGE_FORMAT_FLOWFILE_STREAM_V3_VALUE ->
|
||||
new FlowFileStreamMerger(new FlowFilePackagerV3(), StandardFlowFileMediaType.VERSION_3.getMediaType());
|
||||
case MERGE_FORMAT_FLOWFILE_STREAM_V2_VALUE ->
|
||||
new FlowFileStreamMerger(new FlowFilePackagerV2(), StandardFlowFileMediaType.VERSION_2.getMediaType());
|
||||
case MERGE_FORMAT_FLOWFILE_TAR_V1_VALUE ->
|
||||
new FlowFileStreamMerger(new FlowFilePackagerV1(), StandardFlowFileMediaType.VERSION_1.getMediaType());
|
||||
case MERGE_FORMAT_CONCAT_VALUE -> new BinaryConcatenationMerge();
|
||||
case MERGE_FORMAT_AVRO_VALUE -> new AvroMerge();
|
||||
default -> throw new AssertionError();
|
||||
};
|
||||
|
||||
final AttributeStrategy attributeStrategy = AttributeStrategyUtil.strategyFor(context);
|
||||
|
||||
|
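Like the log-level hunks earlier, this converts a statement switch into an expression, but here each arm yields a strategy object and the default arm throws, keeping the "unreachable" case explicit in one place. A compact sketch of that selection pattern, with hypothetical merger types in place of the real TarMerge, ZipMerge, and friends:

    class MergerSelectionDemo {
        interface MergeBin { String name(); }

        // Hypothetical stand-ins for the real merger implementations.
        record TarMerge() implements MergeBin {
            public String name() { return "tar"; }
        }
        record ZipMerge(int level) implements MergeBin {
            public String name() { return "zip-" + level; }
        }

        static MergeBin mergerFor(String format, int compressionLevel) {
            return switch (format) {
                case "tar" -> new TarMerge();
                case "zip" -> new ZipMerge(compressionLevel);
                default    -> throw new AssertionError("unknown merge format: " + format);
            };
        }

        public static void main(String[] args) {
            System.out.println(mergerFor("zip", 3).name()); // zip-3
        }
    }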
@ -546,7 +536,7 @@ public class MergeContent extends BinFiles {
|
|||
return binProcessingResult;
|
||||
}
|
||||
|
||||
Collections.sort(contents, new FragmentComparator());
|
||||
contents.sort(new FragmentComparator());
|
||||
}
|
||||
|
||||
FlowFile bundle = merger.merge(bin, context);
|
||||
|
@ -736,8 +726,8 @@ public class MergeContent extends BinFiles {
|
|||
private byte[] getDelimiterFileContent(final ProcessContext context, final List<FlowFile> flowFiles, final PropertyDescriptor descriptor)
|
||||
throws IOException {
|
||||
byte[] property = null;
|
||||
if (flowFiles != null && flowFiles.size() > 0) {
|
||||
final FlowFile flowFile = flowFiles.get(0);
|
||||
if (flowFiles != null && !flowFiles.isEmpty()) {
|
||||
final FlowFile flowFile = flowFiles.getFirst();
|
||||
if (flowFile != null) {
|
||||
final String value = context.getProperty(descriptor).evaluateAttributeExpressions(flowFile).getValue();
|
||||
if (value != null) {
|
||||
|
@ -750,8 +740,8 @@ public class MergeContent extends BinFiles {
|
|||
|
||||
private byte[] getDelimiterTextContent(final ProcessContext context, final List<FlowFile> flowFiles, final PropertyDescriptor descriptor) {
|
||||
byte[] property = null;
|
||||
if (flowFiles != null && flowFiles.size() > 0) {
|
||||
final FlowFile flowFile = flowFiles.get(0);
|
||||
if (flowFiles != null && !flowFiles.isEmpty()) {
|
||||
final FlowFile flowFile = flowFiles.getFirst();
|
||||
if (flowFile != null) {
|
||||
final String value = context.getProperty(descriptor).evaluateAttributeExpressions(flowFile).getValue();
|
||||
if (value != null) {
|
||||
|
@ -784,14 +774,14 @@ public class MergeContent extends BinFiles {
|
|||
path = path.getNameCount() == 1 ? null : path.subpath(1, path.getNameCount());
|
||||
}
|
||||
|
||||
return path == null ? "" : path.toString() + "/";
|
||||
return path == null ? "" : path + "/";
|
||||
}
|
||||
|
||||
private String createFilename(final List<FlowFile> flowFiles) {
|
||||
if (flowFiles.size() == 1) {
|
||||
return flowFiles.get(0).getAttribute(CoreAttributes.FILENAME.key());
|
||||
return flowFiles.getFirst().getAttribute(CoreAttributes.FILENAME.key());
|
||||
} else {
|
||||
final FlowFile ff = flowFiles.get(0);
|
||||
final FlowFile ff = flowFiles.getFirst();
|
||||
final String origFilename = ff.getAttribute(SEGMENT_ORIGINAL_FILENAME);
|
||||
if (origFilename != null) {
|
||||
return origFilename;
|
||||
|
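flowFiles.getFirst() replaces flowFiles.get(0); getFirst() comes from the SequencedCollection interface added in Java 21 and reads as intent rather than index arithmetic. Note that it throws NoSuchElementException on an empty list where get(0) throws IndexOutOfBoundsException — the emptiness guards in the hunks above keep either from firing. A quick check:

    import java.util.List;
    import java.util.NoSuchElementException;

    class GetFirstDemo {
        public static void main(String[] args) {
            List<String> names = List.of("a.txt", "b.txt");
            System.out.println(names.getFirst());        // a.txt
            try {
                List.of().getFirst();
            } catch (NoSuchElementException e) {
                System.out.println("empty list: NoSuchElementException");
            }
        }
    }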
@ -870,7 +860,7 @@ public class MergeContent extends BinFiles {
|
|||
private long getMaxEntrySize(final List<FlowFile> contents) {
|
||||
final OptionalLong maxSize = contents.stream()
|
||||
.parallel()
|
||||
.mapToLong(ff -> ff.getSize())
|
||||
.mapToLong(FlowFile::getSize)
|
||||
.max();
|
||||
return maxSize.orElse(0L);
|
||||
}
|
||||
|
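Here the lambda ff -> ff.getSize() becomes the method reference FlowFile::getSize, with no behavior change; the stream still reduces to an OptionalLong whose orElse(0L) supplies the empty-bin default. The same pipeline over plain values, with a hypothetical Entry record replacing FlowFile:

    import java.util.List;
    import java.util.OptionalLong;

    class MaxEntrySizeDemo {
        record Entry(long size) {}

        static long maxSize(List<Entry> entries) {
            OptionalLong max = entries.stream()
                    .mapToLong(Entry::size)   // method reference, same as e -> e.size()
                    .max();
            return max.orElse(0L);            // empty input collapses to 0
        }

        public static void main(String[] args) {
            System.out.println(maxSize(List.of(new Entry(10), new Entry(40)))); // 40
            System.out.println(maxSize(List.of()));                             // 0
        }
    }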
@ -1029,7 +1019,7 @@ public class MergeContent extends BinFiles {
|
|||
final Map<String, byte[]> metadata = new TreeMap<>();
|
||||
final AtomicReference<Schema> schema = new AtomicReference<>(null);
|
||||
final AtomicReference<String> inputCodec = new AtomicReference<>(null);
|
||||
final DataFileWriter<GenericRecord> writer = new DataFileWriter<>(new GenericDatumWriter<GenericRecord>());
|
||||
final DataFileWriter<GenericRecord> writer = new DataFileWriter<>(new GenericDatumWriter<>());
|
||||
|
||||
// we don't pass the parents to the #create method because the parents belong to different sessions
|
||||
FlowFile bundle = session.create(contents);
|
||||
|
|
|
@ -62,7 +62,6 @@ import java.io.IOException;
|
|||
import java.io.InputStream;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
|
@ -186,7 +185,6 @@ public class MergeRecord extends AbstractSessionFactoryProcessor {
|
|||
+ "will be grouped together. All FlowFiles in this group must have the same value for the \"fragment.count\" attribute. The ordering of "
|
||||
+ "the Records that are output is not guaranteed.");
|
||||
|
||||
|
||||
public static final PropertyDescriptor RECORD_READER = new PropertyDescriptor.Builder()
|
||||
.name("record-reader")
|
||||
.displayName("Record Reader")
|
||||
|
@ -277,7 +275,19 @@ public class MergeRecord extends AbstractSessionFactoryProcessor {
|
|||
.addValidator(StandardValidators.TIME_PERIOD_VALIDATOR)
|
||||
.build();
|
||||
|
||||
|
||||
private static final List<PropertyDescriptor> PROPERTIES = List.of(
|
||||
RECORD_READER,
|
||||
RECORD_WRITER,
|
||||
MERGE_STRATEGY,
|
||||
CORRELATION_ATTRIBUTE_NAME,
|
||||
AttributeStrategyUtil.ATTRIBUTE_STRATEGY,
|
||||
MIN_RECORDS,
|
||||
MAX_RECORDS,
|
||||
MIN_SIZE,
|
||||
MAX_SIZE,
|
||||
MAX_BIN_AGE,
|
||||
MAX_BIN_COUNT
|
||||
);
|
||||
|
||||
public static final Relationship REL_MERGED = new Relationship.Builder()
|
||||
.name("merged")
|
||||
|
@ -292,37 +302,24 @@ public class MergeRecord extends AbstractSessionFactoryProcessor {
|
|||
.description("If the bundle cannot be created, all FlowFiles that would have been used to created the bundle will be transferred to failure")
|
||||
.build();
|
||||
|
||||
private final AtomicReference<RecordBinManager> binManager = new AtomicReference<>();
|
||||
private static final Set<Relationship> RELATIONSHIPS = Set.of(
|
||||
REL_ORIGINAL,
|
||||
REL_FAILURE,
|
||||
REL_MERGED
|
||||
);
|
||||
|
||||
private final AtomicReference<RecordBinManager> binManager = new AtomicReference<>();
|
||||
|
||||
@Override
|
||||
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
|
||||
final List<PropertyDescriptor> properties = new ArrayList<>();
|
||||
properties.add(RECORD_READER);
|
||||
properties.add(RECORD_WRITER);
|
||||
properties.add(MERGE_STRATEGY);
|
||||
properties.add(CORRELATION_ATTRIBUTE_NAME);
|
||||
properties.add(AttributeStrategyUtil.ATTRIBUTE_STRATEGY);
|
||||
properties.add(MIN_RECORDS);
|
||||
properties.add(MAX_RECORDS);
|
||||
properties.add(MIN_SIZE);
|
||||
properties.add(MAX_SIZE);
|
||||
properties.add(MAX_BIN_AGE);
|
||||
properties.add(MAX_BIN_COUNT);
|
||||
return properties;
|
||||
return PROPERTIES;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public Set<Relationship> getRelationships() {
|
||||
final Set<Relationship> relationships = new HashSet<>();
|
||||
relationships.add(REL_ORIGINAL);
|
||||
relationships.add(REL_FAILURE);
|
||||
relationships.add(REL_MERGED);
|
||||
return relationships;
|
||||
return RELATIONSHIPS;
|
||||
}
|
||||
|
||||
|
||||
@OnStopped
|
||||
public final void resetState() {
|
||||
final RecordBinManager manager = binManager.get();
|
||||
|
@ -476,7 +473,6 @@ public class MergeRecord extends AbstractSessionFactoryProcessor {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
protected String getGroupId(final ProcessContext context, final FlowFile flowFile, final RecordSchema schema, final ProcessSession session) {
|
||||
final String mergeStrategy = context.getProperty(MERGE_STRATEGY).getValue();
|
||||
if (MERGE_STRATEGY_DEFRAGMENT.getValue().equals(mergeStrategy)) {
|
||||
|
|
|
@ -16,16 +16,6 @@
|
|||
*/
|
||||
package org.apache.nifi.processors.standard;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import org.apache.nifi.annotation.behavior.InputRequirement;
|
||||
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
|
||||
import org.apache.nifi.annotation.behavior.SideEffectFree;
|
||||
|
@ -48,6 +38,13 @@ import org.apache.nifi.processor.util.StandardValidators;
|
|||
import org.apache.nifi.stream.io.StreamUtils;
|
||||
import org.apache.nifi.util.StopWatch;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
@SideEffectFree
|
||||
@SupportsBatching
|
||||
@Tags({"binary", "discard", "keep"})
|
||||
|
@ -60,7 +57,8 @@ public class ModifyBytes extends AbstractProcessor {
|
|||
.name("success")
|
||||
.description("Processed flowfiles.")
|
||||
.build();
|
||||
private final Set<Relationship> relationships;
|
||||
private static final Set<Relationship> RELATIONSHIPS = Set.of(REL_SUCCESS);
|
||||
|
||||
public static final PropertyDescriptor START_OFFSET = new PropertyDescriptor.Builder()
|
||||
.name("Start Offset")
|
||||
.displayName("Start Offset")
|
||||
|
@ -87,28 +85,20 @@ public class ModifyBytes extends AbstractProcessor {
|
|||
.allowableValues("true", "false")
|
||||
.defaultValue("false")
|
||||
.build();
|
||||
private final List<PropertyDescriptor> propDescriptors;
|
||||
|
||||
public ModifyBytes() {
|
||||
HashSet<Relationship> r = new HashSet<>();
|
||||
r.add(REL_SUCCESS);
|
||||
relationships = Collections.unmodifiableSet(r);
|
||||
|
||||
ArrayList<PropertyDescriptor> pds = new ArrayList<>();
|
||||
pds.add(START_OFFSET);
|
||||
pds.add(END_OFFSET);
|
||||
pds.add(REMOVE_ALL);
|
||||
propDescriptors = Collections.unmodifiableList(pds);
|
||||
}
|
||||
private static final List<PropertyDescriptor> PROPERTIES = List.of(
|
||||
START_OFFSET,
|
||||
END_OFFSET,
|
||||
REMOVE_ALL
|
||||
);
|
||||
|
||||
@Override
|
||||
public Set<Relationship> getRelationships() {
|
||||
return relationships;
|
||||
return RELATIONSHIPS;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
|
||||
return propDescriptors;
|
||||
return PROPERTIES;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -16,20 +16,6 @@
|
|||
*/
|
||||
package org.apache.nifi.processors.standard;
|
||||
|
||||
import static java.util.Collections.singletonMap;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
import org.apache.nifi.annotation.behavior.InputRequirement;
|
||||
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
|
||||
import org.apache.nifi.annotation.behavior.SideEffectFree;
|
||||
|
@ -54,11 +40,22 @@ import org.apache.nifi.logging.ComponentLog;
|
|||
import org.apache.nifi.processor.AbstractProcessor;
|
||||
import org.apache.nifi.processor.ProcessContext;
|
||||
import org.apache.nifi.processor.ProcessSession;
|
||||
import org.apache.nifi.processor.ProcessorInitializationContext;
|
||||
import org.apache.nifi.processor.Relationship;
|
||||
import org.apache.nifi.processor.exception.ProcessException;
|
||||
import org.apache.nifi.processor.util.StandardValidators;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
|
||||
import static java.util.Collections.singletonMap;
|
||||
|
||||
@SideEffectFree
|
||||
@TriggerSerially
|
||||
@TriggerWhenEmpty
|
||||
|
@ -165,6 +162,18 @@ public class MonitorActivity extends AbstractProcessor {
|
|||
.defaultValue(REPORT_NODE_ALL.getValue())
|
||||
.build();
|
||||
|
||||
private static final List<PropertyDescriptor> PROPERTIES = List.of(
|
||||
THRESHOLD,
|
||||
CONTINUALLY_SEND_MESSAGES,
|
||||
INACTIVITY_MESSAGE,
|
||||
ACTIVITY_RESTORED_MESSAGE,
|
||||
WAIT_FOR_ACTIVITY,
|
||||
RESET_STATE_ON_RESTART,
|
||||
COPY_ATTRIBUTES,
|
||||
MONITORING_SCOPE,
|
||||
REPORTING_NODE
|
||||
);
|
||||
|
||||
public static final Relationship REL_SUCCESS = new Relationship.Builder()
|
||||
.name("success")
|
||||
.description("All incoming FlowFiles are routed to success")
|
||||
|
@ -180,8 +189,11 @@ public class MonitorActivity extends AbstractProcessor {
|
|||
+ "period of inactivity")
|
||||
.build();
|
||||
|
||||
private List<PropertyDescriptor> properties;
|
||||
private Set<Relationship> relationships;
|
||||
private final static Set<Relationship> RELATIONSHIPS = Set.of(
|
||||
REL_SUCCESS,
|
||||
REL_INACTIVE,
|
||||
REL_ACTIVITY_RESTORED
|
||||
);
|
||||
|
||||
private final AtomicBoolean connectedWhenLastTriggered = new AtomicBoolean(false);
|
||||
private final AtomicLong lastInactiveMessage = new AtomicLong();
|
||||
|
@ -190,35 +202,14 @@ public class MonitorActivity extends AbstractProcessor {
|
|||
|
||||
private volatile LocalFlowActivityInfo localFlowActivityInfo;
|
||||
|
||||
@Override
|
||||
protected void init(final ProcessorInitializationContext context) {
|
||||
final List<PropertyDescriptor> properties = new ArrayList<>();
|
||||
properties.add(THRESHOLD);
|
||||
properties.add(CONTINUALLY_SEND_MESSAGES);
|
||||
properties.add(INACTIVITY_MESSAGE);
|
||||
properties.add(ACTIVITY_RESTORED_MESSAGE);
|
||||
properties.add(WAIT_FOR_ACTIVITY);
|
||||
properties.add(RESET_STATE_ON_RESTART);
|
||||
properties.add(COPY_ATTRIBUTES);
|
||||
properties.add(MONITORING_SCOPE);
|
||||
properties.add(REPORTING_NODE);
|
||||
this.properties = Collections.unmodifiableList(properties);
|
||||
|
||||
final Set<Relationship> relationships = new HashSet<>();
|
||||
relationships.add(REL_SUCCESS);
|
||||
relationships.add(REL_INACTIVE);
|
||||
relationships.add(REL_ACTIVITY_RESTORED);
|
||||
this.relationships = Collections.unmodifiableSet(relationships);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Set<Relationship> getRelationships() {
|
||||
return relationships;
|
||||
return RELATIONSHIPS;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
|
||||
return properties;
|
||||
return PROPERTIES;
|
||||
}
|
||||
|
||||
@OnScheduled
|
||||
|
@ -277,7 +268,7 @@ public class MonitorActivity extends AbstractProcessor {
|
|||
final boolean firstKnownTransfer = !localFlowActivityInfo.hasSuccessfulTransfer();
|
||||
final boolean flowStateMustBecomeActive = !wasActive || firstKnownTransfer;
|
||||
|
||||
localFlowActivityInfo.update(flowFiles.get(0));
|
||||
localFlowActivityInfo.update(flowFiles.getFirst());
|
||||
|
||||
if (isClusterScope && flowStateMustBecomeActive) {
|
||||
localFlowActivityInfo.forceSync();
|
||||
|
|
|
@ -16,15 +16,6 @@
|
|||
*/
|
||||
package org.apache.nifi.processors.standard;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.nifi.annotation.behavior.InputRequirement;
|
||||
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
|
||||
|
@ -47,6 +38,13 @@ import org.apache.nifi.processor.Relationship;
|
|||
import org.apache.nifi.processor.exception.ProcessException;
|
||||
import org.apache.nifi.processor.util.StandardValidators;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
@SupportsBatching
|
||||
@Tags({"map", "cache", "notify", "distributed", "signal", "release"})
|
||||
@InputRequirement(Requirement.INPUT_REQUIRED)
|
||||
|
@ -134,6 +132,15 @@ public class Notify extends AbstractProcessor {
|
|||
.expressionLanguageSupported(ExpressionLanguageScope.NONE)
|
||||
.build();
|
||||
|
||||
private static final List<PropertyDescriptor> PROPERTIES = List.of(
|
||||
RELEASE_SIGNAL_IDENTIFIER,
|
||||
SIGNAL_COUNTER_NAME,
|
||||
SIGNAL_COUNTER_DELTA,
|
||||
SIGNAL_BUFFER_COUNT,
|
||||
DISTRIBUTED_CACHE_SERVICE,
|
||||
ATTRIBUTE_CACHE_REGEX
|
||||
);
|
||||
|
||||
public static final Relationship REL_SUCCESS = new Relationship.Builder()
|
||||
.name("success")
|
||||
.description("All FlowFiles where the release signal has been successfully entered in the cache will be routed to this relationship")
|
||||
|
@ -144,30 +151,19 @@ public class Notify extends AbstractProcessor {
|
|||
.description("When the cache cannot be reached, or if the Release Signal Identifier evaluates to null or empty, FlowFiles will be routed to this relationship")
|
||||
.build();
|
||||
|
||||
private final Set<Relationship> relationships;
|
||||
|
||||
public Notify() {
|
||||
final Set<Relationship> rels = new HashSet<>();
|
||||
rels.add(REL_SUCCESS);
|
||||
rels.add(REL_FAILURE);
|
||||
relationships = Collections.unmodifiableSet(rels);
|
||||
}
|
||||
private static final Set<Relationship> RELATIONSHIPS = Set.of(
|
||||
REL_SUCCESS,
|
||||
REL_FAILURE
|
||||
);
|
||||
|
||||
@Override
|
||||
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
|
||||
final List<PropertyDescriptor> descriptors = new ArrayList<>();
|
||||
descriptors.add(RELEASE_SIGNAL_IDENTIFIER);
|
||||
descriptors.add(SIGNAL_COUNTER_NAME);
|
||||
descriptors.add(SIGNAL_COUNTER_DELTA);
|
||||
descriptors.add(SIGNAL_BUFFER_COUNT);
|
||||
descriptors.add(DISTRIBUTED_CACHE_SERVICE);
|
||||
descriptors.add(ATTRIBUTE_CACHE_REGEX);
|
||||
return descriptors;
|
||||
return PROPERTIES;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Set<Relationship> getRelationships() {
|
||||
return relationships;
|
||||
return RELATIONSHIPS;
|
||||
}
|
||||
|
||||
private class SignalBuffer {
|
||||
|
@ -177,7 +173,7 @@ public class Notify extends AbstractProcessor {
|
|||
final List<FlowFile> flowFiles = new ArrayList<>();
|
||||
|
||||
int incrementDelta(final String counterName, final int delta) {
|
||||
int current = deltas.containsKey(counterName) ? deltas.get(counterName) : 0;
|
||||
int current = deltas.getOrDefault(counterName, 0);
|
||||
// Zero (0) clears count.
|
||||
int updated = delta == 0 ? 0 : current + delta;
|
||||
deltas.put(counterName, updated);
|
||||
|
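deltas.getOrDefault(counterName, 0) folds the containsKey/get pair into a single call: besides being shorter, it reads the map once instead of twice. A sketch:

    import java.util.HashMap;
    import java.util.Map;

    class GetOrDefaultDemo {
        public static void main(String[] args) {
            Map<String, Integer> deltas = new HashMap<>();
            deltas.put("counter-a", 5);

            // One lookup instead of containsKey(...) ? get(...) : 0
            int a = deltas.getOrDefault("counter-a", 0);
            int b = deltas.getOrDefault("counter-b", 0);
            System.out.println(a + " " + b);  // 5 0
        }
    }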
@ -253,7 +249,6 @@ public class Notify extends AbstractProcessor {
|
|||
if (logger.isDebugEnabled()) {
|
||||
logger.debug("Cached release signal identifier {} counterName {} from FlowFile {}", signalId, counterName, flowFile);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
signalBuffers.forEach((signalId, signalBuffer) -> {
|
||||
|
@ -268,5 +263,4 @@ public class Notify extends AbstractProcessor {
|
|||
}
|
||||
});
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -130,6 +130,8 @@ public class PackageFlowFile extends AbstractProcessor {
|
|||
.addValidator(StandardValidators.createLongValidator(1, 10_000, true))
|
||||
.build();
|
||||
|
||||
private static final List<PropertyDescriptor> PROPERTIES = List.of(BATCH_SIZE);
|
||||
|
||||
static final Relationship REL_SUCCESS = new Relationship.Builder()
|
||||
.name("success")
|
||||
.description("The packaged FlowFile is sent to this relationship")
|
||||
|
@ -139,15 +141,11 @@ public class PackageFlowFile extends AbstractProcessor {
|
|||
.description("The FlowFiles that were used to create the package are sent to this relationship")
|
||||
.build();
|
||||
|
||||
private static final Set<Relationship> RELATIONSHIPS = Set.of(
|
||||
private static final Set<Relationship> RELATIONSHIPS = Set.of(
|
||||
REL_SUCCESS,
|
||||
REL_ORIGINAL
|
||||
);
|
||||
|
||||
private static final List<PropertyDescriptor> PROPERTY_DESCRIPTORS = List.of(
|
||||
BATCH_SIZE
|
||||
);
|
||||
|
||||
@Override
|
||||
public Set<Relationship> getRelationships() {
|
||||
return RELATIONSHIPS;
|
||||
|
@ -155,7 +153,7 @@ public class PackageFlowFile extends AbstractProcessor {
|
|||
|
||||
@Override
|
||||
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
|
||||
return PROPERTY_DESCRIPTORS;
|
||||
return PROPERTIES;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -43,9 +43,7 @@ import org.apache.nifi.syslog.parsers.SyslogParser;
|
|||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.nio.charset.Charset;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
@ -79,6 +77,8 @@ public class ParseSyslog extends AbstractProcessor {
|
|||
.addValidator(StandardValidators.CHARACTER_SET_VALIDATOR)
|
||||
.build();
|
||||
|
||||
private static final List<PropertyDescriptor> PROPERTIES = List.of(CHARSET);
|
||||
|
||||
static final Relationship REL_FAILURE = new Relationship.Builder()
|
||||
.name("failure")
|
||||
.description("Any FlowFile that could not be parsed as a Syslog message will be transferred to this Relationship without any attributes being added")
|
||||
|
@ -88,22 +88,21 @@ public class ParseSyslog extends AbstractProcessor {
|
|||
.description("Any FlowFile that is successfully parsed as a Syslog message will be to this Relationship.")
|
||||
.build();
|
||||
|
||||
private SyslogParser parser;
|
||||
private static final Set<Relationship> RELATIONSHIPS = Set.of(
|
||||
REL_FAILURE,
|
||||
REL_SUCCESS
|
||||
);
|
||||
|
||||
private SyslogParser parser;
|
||||
|
||||
@Override
|
||||
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
|
||||
final List<PropertyDescriptor> properties = new ArrayList<>(1);
|
||||
properties.add(CHARSET);
|
||||
return properties;
|
||||
return PROPERTIES;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Set<Relationship> getRelationships() {
|
||||
final Set<Relationship> relationships = new HashSet<>();
|
||||
relationships.add(REL_FAILURE);
|
||||
relationships.add(REL_SUCCESS);
|
||||
return relationships;
|
||||
return RELATIONSHIPS;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -38,17 +38,15 @@ import org.apache.nifi.processor.Relationship;
|
|||
import org.apache.nifi.processor.exception.ProcessException;
|
||||
import org.apache.nifi.processor.util.StandardValidators;
|
||||
import org.apache.nifi.stream.io.StreamUtils;
|
||||
import org.apache.nifi.syslog.attributes.SyslogAttributes;
|
||||
import org.apache.nifi.syslog.events.Syslog5424Event;
|
||||
import org.apache.nifi.syslog.keyproviders.SyslogPrefixedKeyProvider;
|
||||
import org.apache.nifi.syslog.parsers.StrictSyslog5424Parser;
|
||||
import org.apache.nifi.syslog.utils.NifiStructuredDataPolicy;
|
||||
import org.apache.nifi.syslog.utils.NilHandlingPolicy;
|
||||
import org.apache.nifi.syslog.parsers.StrictSyslog5424Parser;
|
||||
import org.apache.nifi.syslog.events.Syslog5424Event;
|
||||
import org.apache.nifi.syslog.attributes.SyslogAttributes;
|
||||
|
||||
import java.nio.charset.Charset;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
@ -113,6 +111,12 @@ public class ParseSyslog5424 extends AbstractProcessor {
|
|||
.defaultValue("true")
|
||||
.build();
|
||||
|
||||
private static final List<PropertyDescriptor> PROPERTIES = List.of(
|
||||
CHARSET,
|
||||
NIL_POLICY,
|
||||
INCLUDE_BODY_IN_ATTRIBUTES
|
||||
);
|
||||
|
||||
static final Relationship REL_FAILURE = new Relationship.Builder()
|
||||
.name("failure")
|
||||
.description("Any FlowFile that could not be parsed as a Syslog message will be transferred to this Relationship without any attributes being added")
|
||||
|
@ -122,25 +126,23 @@ public class ParseSyslog5424 extends AbstractProcessor {
|
|||
.description("Any FlowFile that is successfully parsed as a Syslog message will be to this Relationship.")
|
||||
.build();
|
||||
|
||||
private static final Set<Relationship> RELATIONSHIPS = Set.of(
|
||||
REL_FAILURE,
|
||||
REL_SUCCESS
|
||||
);
|
||||
|
||||
private volatile StrictSyslog5424Parser parser;
|
||||
|
||||
private volatile Charset charset;
|
||||
|
||||
@Override
|
||||
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
|
||||
final List<PropertyDescriptor> properties = new ArrayList<>(2);
|
||||
properties.add(CHARSET);
|
||||
properties.add(NIL_POLICY);
|
||||
properties.add(INCLUDE_BODY_IN_ATTRIBUTES);
|
||||
return properties;
|
||||
return PROPERTIES;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Set<Relationship> getRelationships() {
|
||||
final Set<Relationship> relationships = new HashSet<>();
|
||||
relationships.add(REL_FAILURE);
|
||||
relationships.add(REL_SUCCESS);
|
||||
return relationships;
|
||||
return RELATIONSHIPS;
|
||||
}
|
||||
|
||||
@OnScheduled
|
||||
|
|
|
@ -55,12 +55,10 @@ import org.apache.nifi.serialization.record.util.DataTypeUtils;
|
|||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
@ -143,6 +141,11 @@ public class PartitionRecord extends AbstractProcessor {
|
|||
.required(true)
|
||||
.build();
|
||||
|
||||
private static final List<PropertyDescriptor> PROPERTIES = List.of(
|
||||
RECORD_READER,
|
||||
RECORD_WRITER
|
||||
);
|
||||
|
||||
static final Relationship REL_SUCCESS = new Relationship.Builder()
|
||||
.name("success")
|
||||
.description("FlowFiles that are successfully partitioned will be routed to this relationship")
|
||||
|
@ -157,33 +160,32 @@ public class PartitionRecord extends AbstractProcessor {
|
|||
+ "the unchanged FlowFile will be routed to this relationship")
|
||||
.build();
|
||||
|
||||
private static final Set<Relationship> RELATIONSHIPS = Set.of(
|
||||
REL_SUCCESS,
|
||||
REL_FAILURE,
|
||||
REL_ORIGINAL
|
||||
);
|
||||
|
||||
@Override
|
||||
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
|
||||
final List<PropertyDescriptor> properties = new ArrayList<>();
|
||||
properties.add(RECORD_READER);
|
||||
properties.add(RECORD_WRITER);
|
||||
return properties;
|
||||
return PROPERTIES;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Set<Relationship> getRelationships() {
|
||||
final Set<Relationship> relationships = new HashSet<>();
|
||||
relationships.add(REL_SUCCESS);
|
||||
relationships.add(REL_FAILURE);
|
||||
relationships.add(REL_ORIGINAL);
|
||||
return relationships;
|
||||
return RELATIONSHIPS;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Collection<ValidationResult> customValidate(final ValidationContext validationContext) {
|
||||
final boolean hasDynamic = validationContext.getProperties().keySet().stream()
|
||||
.anyMatch(prop -> prop.isDynamic());
|
||||
.anyMatch(PropertyDescriptor::isDynamic);
|
||||
|
||||
if (hasDynamic) {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
return Collections.singleton(new ValidationResult.Builder()
|
||||
return Set.of(new ValidationResult.Builder()
|
||||
.subject("User-defined Properties")
|
||||
.valid(false)
|
||||
.explanation("At least one RecordPath must be added to this processor by adding a user-defined property")
|
||||
|
@ -214,10 +216,10 @@ public class PartitionRecord extends AbstractProcessor {
|
|||
final Map<String, RecordPath> recordPaths;
|
||||
try {
|
||||
recordPaths = context.getProperties().keySet().stream()
|
||||
.filter(prop -> prop.isDynamic())
|
||||
.collect(Collectors.toMap(
|
||||
prop -> prop.getName(),
|
||||
prop -> getRecordPath(context, prop, flowFile)));
|
||||
.filter(PropertyDescriptor::isDynamic)
|
||||
.collect(Collectors.toMap(
|
||||
PropertyDescriptor::getName,
|
||||
prop -> getRecordPath(context, prop, flowFile)));
|
||||
} catch (final Exception e) {
|
||||
getLogger().error("Failed to compile RecordPath for {}; routing to failure", flowFile, e);
|
||||
session.transfer(flowFile, REL_FAILURE);
|
||||
|
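The collector above now keys dynamic properties by PropertyDescriptor::getName instead of the equivalent lambdas. One caveat with this two-argument Collectors.toMap: duplicate keys throw IllegalStateException, which is fine here because descriptor names are unique. A reduced version, with a hypothetical Descriptor record in place of PropertyDescriptor:

    import java.util.List;
    import java.util.Map;
    import java.util.stream.Collectors;

    class ToMapDemo {
        record Descriptor(String name, boolean dynamic) {}

        public static void main(String[] args) {
            List<Descriptor> props = List.of(
                    new Descriptor("home.state", true),
                    new Descriptor("record-reader", false));

            Map<String, Boolean> byName = props.stream()
                    .filter(Descriptor::dynamic)       // method reference as predicate
                    .collect(Collectors.toMap(
                            Descriptor::name,          // key extractor
                            Descriptor::dynamic));     // value extractor
            System.out.println(byName);                // {home.state=true}
        }
    }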
@ -324,8 +326,7 @@ public class PartitionRecord extends AbstractProcessor {
|
|||
|
||||
private RecordPath getRecordPath(final ProcessContext context, final PropertyDescriptor prop, final FlowFile flowFile) {
|
||||
final String pathText = context.getProperty(prop).evaluateAttributeExpressions(flowFile).getValue();
|
||||
final RecordPath recordPath = recordPathCache.getCompiled(pathText);
|
||||
return recordPath;
|
||||
return recordPathCache.getCompiled(pathText);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -365,10 +366,9 @@ public class PartitionRecord extends AbstractProcessor {
|
|||
if (obj == null) {
|
||||
return false;
|
||||
}
|
||||
if (!(obj instanceof ValueWrapper)) {
|
||||
if (!(obj instanceof ValueWrapper other)) {
|
||||
return false;
|
||||
}
|
||||
final ValueWrapper other = (ValueWrapper) obj;
|
||||
if (value == null && other.value == null) {
|
||||
return true;
|
||||
}
|
||||
|
@ -401,7 +401,7 @@ public class PartitionRecord extends AbstractProcessor {
|
|||
}
|
||||
|
||||
// If value is null, don't create an attribute
|
||||
final Object value = values.get(0).get();
|
||||
final Object value = values.getFirst().get();
|
||||
if (value == null) {
|
||||
continue;
|
||||
}
|
||||
|
@ -440,10 +440,9 @@ public class PartitionRecord extends AbstractProcessor {
|
|||
if (obj == null) {
|
||||
return false;
|
||||
}
|
||||
if (!(obj instanceof RecordValueMap)) {
|
||||
if (!(obj instanceof RecordValueMap other)) {
|
||||
return false;
|
||||
}
|
||||
final RecordValueMap other = (RecordValueMap) obj;
|
||||
return values.equals(other.values);
|
||||
}
|
||||
|
||||
|
|
|
@ -78,7 +78,6 @@ import java.sql.Types;
|
|||
import java.util.ArrayList;
|
||||
import java.util.Base64;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.HexFormat;
|
||||
|
@ -89,12 +88,11 @@ import java.util.Set;
|
|||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static java.lang.String.format;
|
||||
import static org.apache.nifi.expression.ExpressionLanguageScope.ENVIRONMENT;
|
||||
import static org.apache.nifi.expression.ExpressionLanguageScope.FLOWFILE_ATTRIBUTES;
|
||||
import static org.apache.nifi.expression.ExpressionLanguageScope.NONE;
|
||||
import static org.apache.nifi.expression.ExpressionLanguageScope.ENVIRONMENT;
|
||||
|
||||
@InputRequirement(Requirement.INPUT_REQUIRED)
|
||||
@Tags({"sql", "record", "jdbc", "put", "database", "update", "insert", "delete"})
|
||||
|
@@ -156,7 +154,11 @@ public class PutDatabaseRecord extends AbstractProcessor {
             + "such as an invalid query or an integrity constraint violation")
         .build();
 
-    protected static Set<Relationship> relationships;
+    private static final Set<Relationship> RELATIONSHIPS = Set.of(
+            REL_SUCCESS,
+            REL_FAILURE,
+            REL_RETRY
+    );
 
     // Properties
     static final PropertyDescriptor RECORD_READER_FACTORY = new Builder()
@@ -387,7 +389,7 @@ public class PutDatabaseRecord extends AbstractProcessor {
     static final PropertyDescriptor DB_TYPE;
 
     protected static final Map<String, DatabaseAdapter> dbAdapters;
-    protected static List<PropertyDescriptor> propDescriptors;
+    protected static List<PropertyDescriptor> properties;
     private Cache<SchemaKey, TableSchema> schemaCache;
 
     static {
@@ -410,53 +412,45 @@ public class PutDatabaseRecord extends AbstractProcessor {
             .required(false)
             .build();
 
-        final Set<Relationship> r = new HashSet<>();
-        r.add(REL_SUCCESS);
-        r.add(REL_FAILURE);
-        r.add(REL_RETRY);
-        relationships = Collections.unmodifiableSet(r);
-
-        final List<PropertyDescriptor> pds = new ArrayList<>();
-        pds.add(RECORD_READER_FACTORY);
-        pds.add(DB_TYPE);
-        pds.add(STATEMENT_TYPE);
-        pds.add(STATEMENT_TYPE_RECORD_PATH);
-        pds.add(DATA_RECORD_PATH);
-        pds.add(DBCP_SERVICE);
-        pds.add(CATALOG_NAME);
-        pds.add(SCHEMA_NAME);
-        pds.add(TABLE_NAME);
-        pds.add(BINARY_STRING_FORMAT);
-        pds.add(TRANSLATE_FIELD_NAMES);
-        pds.add(UNMATCHED_FIELD_BEHAVIOR);
-        pds.add(UNMATCHED_COLUMN_BEHAVIOR);
-        pds.add(UPDATE_KEYS);
-        pds.add(FIELD_CONTAINING_SQL);
-        pds.add(ALLOW_MULTIPLE_STATEMENTS);
-        pds.add(QUOTE_IDENTIFIERS);
-        pds.add(QUOTE_TABLE_IDENTIFIER);
-        pds.add(QUERY_TIMEOUT);
-        pds.add(RollbackOnFailure.ROLLBACK_ON_FAILURE);
-        pds.add(TABLE_SCHEMA_CACHE_SIZE);
-        pds.add(MAX_BATCH_SIZE);
-        pds.add(AUTO_COMMIT);
-
-        propDescriptors = Collections.unmodifiableList(pds);
+        properties = List.of(
+                RECORD_READER_FACTORY,
+                DB_TYPE,
+                STATEMENT_TYPE,
+                STATEMENT_TYPE_RECORD_PATH,
+                DATA_RECORD_PATH,
+                DBCP_SERVICE,
+                CATALOG_NAME,
+                SCHEMA_NAME,
+                TABLE_NAME,
+                BINARY_STRING_FORMAT,
+                TRANSLATE_FIELD_NAMES,
+                UNMATCHED_FIELD_BEHAVIOR,
+                UNMATCHED_COLUMN_BEHAVIOR,
+                UPDATE_KEYS,
+                FIELD_CONTAINING_SQL,
+                ALLOW_MULTIPLE_STATEMENTS,
+                QUOTE_IDENTIFIERS,
+                QUOTE_TABLE_IDENTIFIER,
+                QUERY_TIMEOUT,
+                RollbackOnFailure.ROLLBACK_ON_FAILURE,
+                TABLE_SCHEMA_CACHE_SIZE,
+                MAX_BATCH_SIZE,
+                AUTO_COMMIT
+        );
     }
 
     private DatabaseAdapter databaseAdapter;
     private volatile Function<Record, String> recordPathOperationType;
     private volatile RecordPath dataRecordPath;
 
 
     @Override
     public Set<Relationship> getRelationships() {
-        return relationships;
+        return RELATIONSHIPS;
     }
 
     @Override
     protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-        return propDescriptors;
+        return properties;
     }
 
     @Override
@@ -898,9 +892,7 @@ public class PutDatabaseRecord extends AbstractProcessor {
                 dest[j] = (Byte) src[j];
             }
             currentValue = dest;
-        } else if (currentValue instanceof String) {
-            final String stringValue = (String) currentValue;
-
+        } else if (currentValue instanceof String stringValue) {
             if (BINARY_STRING_FORMAT_BASE64.getValue().equals(binaryStringFormat)) {
                 currentValue = Base64.getDecoder().decode(stringValue);
             } else if (BINARY_STRING_FORMAT_HEXADECIMAL.getValue().equals(binaryStringFormat)) {
@@ -1062,11 +1054,11 @@ public class PutDatabaseRecord extends AbstractProcessor {
 
     private List<Record> getDataRecords(final Record outerRecord) {
         if (dataRecordPath == null) {
-            return Collections.singletonList(outerRecord);
+            return List.of(outerRecord);
         }
 
         final RecordPathResult result = dataRecordPath.evaluate(outerRecord);
-        final List<FieldValue> fieldValues = result.getSelectedFields().collect(Collectors.toList());
+        final List<FieldValue> fieldValues = result.getSelectedFields().toList();
         if (fieldValues.isEmpty()) {
             throw new ProcessException("RecordPath " + dataRecordPath.getPath() + " evaluated against Record yielded no results.");
         }
@@ -1692,7 +1684,7 @@ public class PutDatabaseRecord extends AbstractProcessor {
         @Override
         public String apply(final Record record) {
             final RecordPathResult recordPathResult = recordPath.evaluate(record);
-            final List<FieldValue> resultList = recordPathResult.getSelectedFields().distinct().collect(Collectors.toList());
+            final List<FieldValue> resultList = recordPathResult.getSelectedFields().distinct().toList();
             if (resultList.isEmpty()) {
                 throw new ProcessException("Evaluated RecordPath " + recordPath.getPath() + " against Record but got no results");
             }
@@ -1701,23 +1693,16 @@ public class PutDatabaseRecord extends AbstractProcessor {
                 throw new ProcessException("Evaluated RecordPath " + recordPath.getPath() + " against Record and received multiple distinct results (" + resultList + ")");
             }
 
-            final String resultValue = String.valueOf(resultList.get(0).getValue()).toUpperCase();
-            switch (resultValue) {
-                case INSERT_TYPE:
-                case UPDATE_TYPE:
-                case DELETE_TYPE:
-                case UPSERT_TYPE:
-                    return resultValue;
-                case "C":
-                case "R":
-                    return INSERT_TYPE;
-                case "U":
-                    return UPDATE_TYPE;
-                case "D":
-                    return DELETE_TYPE;
-            }
+            final String resultValue = String.valueOf(resultList.getFirst().getValue()).toUpperCase();
 
-            throw new ProcessException("Evaluated RecordPath " + recordPath.getPath() + " against Record to determine Statement Type but found invalid value: " + resultValue);
+            return switch (resultValue) {
+                case INSERT_TYPE, UPDATE_TYPE, DELETE_TYPE, UPSERT_TYPE -> resultValue;
+                case "C", "R" -> INSERT_TYPE;
+                case "U" -> UPDATE_TYPE;
+                case "D" -> DELETE_TYPE;
+                default ->
+                    throw new ProcessException("Evaluated RecordPath " + recordPath.getPath() + " against Record to determine Statement Type but found invalid value: " + resultValue);
+            };
         }
     }

@@ -16,16 +16,6 @@
  */
 package org.apache.nifi.processors.standard;
 
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.nio.charset.StandardCharsets;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-
 import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.annotation.behavior.InputRequirement;
 import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
@@ -53,6 +43,13 @@ import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
 
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
+import java.util.List;
+import java.util.Set;
+
 @SupportsBatching
 @Tags({"map", "cache", "put", "distributed"})
 @InputRequirement(Requirement.INPUT_REQUIRED)
@@ -108,6 +105,12 @@ public class PutDistributedMapCache extends AbstractProcessor {
         .expressionLanguageSupported(ExpressionLanguageScope.NONE)
         .build();
 
+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
+            CACHE_ENTRY_IDENTIFIER,
+            DISTRIBUTED_CACHE_SERVICE,
+            CACHE_UPDATE_STRATEGY,
+            CACHE_ENTRY_MAX_BYTES
+    );
+
     public static final Relationship REL_SUCCESS = new Relationship.Builder()
         .name("success")
@@ -118,37 +121,28 @@ public class PutDistributedMapCache extends AbstractProcessor {
         .name("failure")
         .description("Any FlowFile that cannot be inserted into the cache will be routed to this relationship")
         .build();
-    private final Set<Relationship> relationships;
+
+    private static final Set<Relationship> RELATIONSHIPS = Set.of(
+            REL_SUCCESS,
+            REL_FAILURE
+    );
 
     private final Serializer<String> keySerializer = new StringSerializer();
     private final Serializer<byte[]> valueSerializer = new CacheValueSerializer();
     private final Deserializer<byte[]> valueDeserializer = new CacheValueDeserializer();
 
-    public PutDistributedMapCache() {
-        final Set<Relationship> rels = new HashSet<>();
-        rels.add(REL_SUCCESS);
-        rels.add(REL_FAILURE);
-        relationships = Collections.unmodifiableSet(rels);
-    }
-
     @Override
     protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-        final List<PropertyDescriptor> descriptors = new ArrayList<>();
-        descriptors.add(CACHE_ENTRY_IDENTIFIER);
-        descriptors.add(DISTRIBUTED_CACHE_SERVICE);
-        descriptors.add(CACHE_UPDATE_STRATEGY);
-        descriptors.add(CACHE_ENTRY_MAX_BYTES);
-        return descriptors;
+        return PROPERTIES;
     }
 
     @Override
     public Set<Relationship> getRelationships() {
-        return relationships;
+        return RELATIONSHIPS;
     }
 
     @Override
     public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
 
         FlowFile flowFile = session.get();
         if (flowFile == null) {
             return;

@@ -56,7 +56,6 @@ import org.apache.nifi.oauth2.OAuth2AccessTokenProvider;
 import org.apache.nifi.processor.AbstractProcessor;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
-import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
@@ -68,10 +67,7 @@ import java.nio.charset.Charset;
 import java.nio.charset.StandardCharsets;
-import java.util.ArrayList;
 import java.util.Collection;
-import java.util.Collections;
 import java.util.Date;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
@@ -291,6 +287,31 @@ public class PutEmail extends AbstractProcessor {
         .defaultValue(StandardCharsets.UTF_8.name())
         .build();
 
+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
+            SMTP_HOSTNAME,
+            SMTP_PORT,
+            AUTHORIZATION_MODE,
+            OAUTH2_ACCESS_TOKEN_PROVIDER,
+            SMTP_USERNAME,
+            SMTP_PASSWORD,
+            SMTP_AUTH,
+            SMTP_TLS,
+            SMTP_SOCKET_FACTORY,
+            HEADER_XMAILER,
+            ATTRIBUTE_NAME_REGEX,
+            CONTENT_TYPE,
+            FROM,
+            TO,
+            CC,
+            BCC,
+            SUBJECT,
+            MESSAGE,
+            CONTENT_AS_MESSAGE,
+            INPUT_CHARACTER_SET,
+            ATTACH_FILE,
+            INCLUDE_ALL_ATTRIBUTES
+    );
+
     public static final Relationship REL_SUCCESS = new Relationship.Builder()
         .name("success")
         .description("FlowFiles that are successfully sent will be routed to this relationship")
@@ -300,69 +321,33 @@ public class PutEmail extends AbstractProcessor {
         .description("FlowFiles that fail to send will be routed to this relationship")
         .build();
 
-
-    private List<PropertyDescriptor> properties;
-
-    private Set<Relationship> relationships;
+    private static final Set<Relationship> RELATIONSHIPS = Set.of(
+            REL_SUCCESS,
+            REL_FAILURE
+    );
 
     /**
      * Mapping of the mail properties to the NiFi PropertyDescriptors that will be evaluated at runtime
      */
-    private static final Map<String, PropertyDescriptor> propertyToContext = new HashMap<>();
-
-    static {
-        propertyToContext.put("mail.smtp.host", SMTP_HOSTNAME);
-        propertyToContext.put("mail.smtp.port", SMTP_PORT);
-        propertyToContext.put("mail.smtp.socketFactory.port", SMTP_PORT);
-        propertyToContext.put("mail.smtp.socketFactory.class", SMTP_SOCKET_FACTORY);
-        propertyToContext.put("mail.smtp.auth", SMTP_AUTH);
-        propertyToContext.put("mail.smtp.starttls.enable", SMTP_TLS);
-        propertyToContext.put("mail.smtp.user", SMTP_USERNAME);
-        propertyToContext.put("mail.smtp.password", SMTP_PASSWORD);
-    }
-
-    @Override
-    protected void init(final ProcessorInitializationContext context) {
-        final List<PropertyDescriptor> properties = new ArrayList<>();
-        properties.add(SMTP_HOSTNAME);
-        properties.add(SMTP_PORT);
-        properties.add(AUTHORIZATION_MODE);
-        properties.add(OAUTH2_ACCESS_TOKEN_PROVIDER);
-        properties.add(SMTP_USERNAME);
-        properties.add(SMTP_PASSWORD);
-        properties.add(SMTP_AUTH);
-        properties.add(SMTP_TLS);
-        properties.add(SMTP_SOCKET_FACTORY);
-        properties.add(HEADER_XMAILER);
-        properties.add(ATTRIBUTE_NAME_REGEX);
-        properties.add(CONTENT_TYPE);
-        properties.add(FROM);
-        properties.add(TO);
-        properties.add(CC);
-        properties.add(BCC);
-        properties.add(SUBJECT);
-        properties.add(MESSAGE);
-        properties.add(CONTENT_AS_MESSAGE);
-        properties.add(INPUT_CHARACTER_SET);
-        properties.add(ATTACH_FILE);
-        properties.add(INCLUDE_ALL_ATTRIBUTES);
-
-        this.properties = Collections.unmodifiableList(properties);
-
-        final Set<Relationship> relationships = new HashSet<>();
-        relationships.add(REL_SUCCESS);
-        relationships.add(REL_FAILURE);
-        this.relationships = Collections.unmodifiableSet(relationships);
-    }
+    private static final Map<String, PropertyDescriptor> propertyToContext = Map.of(
+            "mail.smtp.host", SMTP_HOSTNAME,
+            "mail.smtp.port", SMTP_PORT,
+            "mail.smtp.socketFactory.port", SMTP_PORT,
+            "mail.smtp.socketFactory.class", SMTP_SOCKET_FACTORY,
+            "mail.smtp.auth", SMTP_AUTH,
+            "mail.smtp.starttls.enable", SMTP_TLS,
+            "mail.smtp.user", SMTP_USERNAME,
+            "mail.smtp.password", SMTP_PASSWORD
+    );
 
     @Override
     public Set<Relationship> getRelationships() {
-        return relationships;
+        return RELATIONSHIPS;
     }
 
     @Override
     protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-        return properties;
+        return PROPERTIES;
     }
 
     @Override
@@ -694,7 +679,7 @@ public class PutEmail extends AbstractProcessor {
      */
     private String getEncoding(final ProcessContext context) {
         final Charset charset = Charset.forName(context.getProperty(INPUT_CHARACTER_SET).getValue());
-        if (Charset.forName("US-ASCII").equals(charset)) {
+        if (StandardCharsets.US_ASCII.equals(charset)) {
             return "7bit";
         }
         // Every other charset in StandardCharsets use 8 bits or more. Using base64 encoding by default

@@ -16,17 +16,6 @@
  */
 package org.apache.nifi.processors.standard;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.TreeMap;
-import java.util.concurrent.atomic.AtomicReference;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
 import org.apache.nifi.annotation.behavior.DynamicProperties;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.InputRequirement;
@@ -42,11 +31,20 @@ import org.apache.nifi.components.ValidationResult;
 import org.apache.nifi.expression.ExpressionLanguageScope;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.processor.ProcessContext;
-import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.processor.util.file.transfer.PutFileTransfer;
 import org.apache.nifi.processors.standard.util.FTPTransfer;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
 @SupportsBatching
 @InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"remote", "copy", "egress", "put", "ftp", "archive", "files"})
@@ -71,49 +69,43 @@ public class PutFTP extends PutFileTransfer<FTPTransfer> {
     private final AtomicReference<List<PropertyDescriptor>> preSendDescriptorRef = new AtomicReference<>();
     private final AtomicReference<List<PropertyDescriptor>> postSendDescriptorRef = new AtomicReference<>();
 
-    private List<PropertyDescriptor> properties;
-
     // PutFileTransfer.onTrigger() uses FlowFile attributes
     public static final PropertyDescriptor REMOTE_PATH = new PropertyDescriptor.Builder()
         .fromPropertyDescriptor(FTPTransfer.REMOTE_PATH)
         .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES).build();
 
-    @Override
-    protected void init(final ProcessorInitializationContext context) {
-        final List<PropertyDescriptor> properties = new ArrayList<>();
-        properties.add(FTPTransfer.HOSTNAME);
-        properties.add(FTPTransfer.PORT);
-        properties.add(FTPTransfer.USERNAME);
-        properties.add(FTPTransfer.PASSWORD);
-        properties.add(REMOTE_PATH);
-        properties.add(FTPTransfer.CREATE_DIRECTORY);
-        properties.add(FTPTransfer.BATCH_SIZE);
-        properties.add(FTPTransfer.CONNECTION_TIMEOUT);
-        properties.add(FTPTransfer.DATA_TIMEOUT);
-        properties.add(FTPTransfer.CONFLICT_RESOLUTION);
-        properties.add(FTPTransfer.DOT_RENAME);
-        properties.add(FTPTransfer.TEMP_FILENAME);
-        properties.add(FTPTransfer.TRANSFER_MODE);
-        properties.add(FTPTransfer.CONNECTION_MODE);
-        properties.add(FTPTransfer.REJECT_ZERO_BYTE);
-        properties.add(FTPTransfer.LAST_MODIFIED_TIME);
-        properties.add(FTPTransfer.PERMISSIONS);
-        properties.add(FTPTransfer.USE_COMPRESSION);
-        properties.add(FTPTransfer.PROXY_CONFIGURATION_SERVICE);
-        properties.add(FTPTransfer.PROXY_TYPE);
-        properties.add(FTPTransfer.PROXY_HOST);
-        properties.add(FTPTransfer.PROXY_PORT);
-        properties.add(FTPTransfer.HTTP_PROXY_USERNAME);
-        properties.add(FTPTransfer.HTTP_PROXY_PASSWORD);
-        properties.add(FTPTransfer.BUFFER_SIZE);
-        properties.add(FTPTransfer.UTF8_ENCODING);
-
-        this.properties = Collections.unmodifiableList(properties);
-    }
+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
+            FTPTransfer.HOSTNAME,
+            FTPTransfer.PORT,
+            FTPTransfer.USERNAME,
+            FTPTransfer.PASSWORD,
+            REMOTE_PATH,
+            FTPTransfer.CREATE_DIRECTORY,
+            FTPTransfer.BATCH_SIZE,
+            FTPTransfer.CONNECTION_TIMEOUT,
+            FTPTransfer.DATA_TIMEOUT,
+            FTPTransfer.CONFLICT_RESOLUTION,
+            FTPTransfer.DOT_RENAME,
+            FTPTransfer.TEMP_FILENAME,
+            FTPTransfer.TRANSFER_MODE,
+            FTPTransfer.CONNECTION_MODE,
+            FTPTransfer.REJECT_ZERO_BYTE,
+            FTPTransfer.LAST_MODIFIED_TIME,
+            FTPTransfer.PERMISSIONS,
+            FTPTransfer.USE_COMPRESSION,
+            FTPTransfer.PROXY_CONFIGURATION_SERVICE,
+            FTPTransfer.PROXY_TYPE,
+            FTPTransfer.PROXY_HOST,
+            FTPTransfer.PROXY_PORT,
+            FTPTransfer.HTTP_PROXY_USERNAME,
+            FTPTransfer.HTTP_PROXY_PASSWORD,
+            FTPTransfer.BUFFER_SIZE,
+            FTPTransfer.UTF8_ENCODING
+    );
 
     @Override
     protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-        return properties;
+        return PROPERTIES;
     }
 
     @Override

@@ -37,7 +37,6 @@ import org.apache.nifi.logging.ComponentLog;
 import org.apache.nifi.processor.AbstractProcessor;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
-import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
@@ -52,9 +51,6 @@ import java.nio.file.attribute.UserPrincipalLookupService;
 import java.time.OffsetDateTime;
 import java.time.format.DateTimeFormatter;
 import java.util.Arrays;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
@@ -167,6 +163,17 @@ public class PutFile extends AbstractProcessor {
         .defaultValue("true")
         .build();
 
+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
+            DIRECTORY,
+            CONFLICT_RESOLUTION,
+            CREATE_DIRS,
+            MAX_DESTINATION_FILES,
+            CHANGE_LAST_MODIFIED_TIME,
+            CHANGE_PERMISSIONS,
+            CHANGE_OWNER,
+            CHANGE_GROUP
+    );
+
     public static final int MAX_FILE_LOCK_ATTEMPTS = 10;
     public static final Relationship REL_SUCCESS = new Relationship.Builder()
         .name("success")
@@ -177,38 +184,19 @@ public class PutFile extends AbstractProcessor {
         .description("Files that could not be written to the output directory for some reason are transferred to this relationship")
         .build();
 
-    private List<PropertyDescriptor> properties;
-    private Set<Relationship> relationships;
-
-    @Override
-    protected void init(final ProcessorInitializationContext context) {
-        // relationships
-        final Set<Relationship> procRels = new HashSet<>();
-        procRels.add(REL_SUCCESS);
-        procRels.add(REL_FAILURE);
-        relationships = Collections.unmodifiableSet(procRels);
-
-        // descriptors
-        final List<PropertyDescriptor> supDescriptors = new ArrayList<>();
-        supDescriptors.add(DIRECTORY);
-        supDescriptors.add(CONFLICT_RESOLUTION);
-        supDescriptors.add(CREATE_DIRS);
-        supDescriptors.add(MAX_DESTINATION_FILES);
-        supDescriptors.add(CHANGE_LAST_MODIFIED_TIME);
-        supDescriptors.add(CHANGE_PERMISSIONS);
-        supDescriptors.add(CHANGE_OWNER);
-        supDescriptors.add(CHANGE_GROUP);
-        properties = Collections.unmodifiableList(supDescriptors);
-    }
+    private static final Set<Relationship> RELATIONSHIPS = Set.of(
+            REL_SUCCESS,
+            REL_FAILURE
+    );
 
     @Override
     public Set<Relationship> getRelationships() {
-        return relationships;
+        return RELATIONSHIPS;
    }
 
     @Override
     protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-        return properties;
+        return PROPERTIES;
     }
 
     @Override

@@ -40,10 +40,7 @@ import org.apache.nifi.util.StringUtils;
 
 import java.io.IOException;
 import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.Collections;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
@@ -80,6 +77,12 @@ public class PutRecord extends AbstractProcessor {
         .required(true)
         .build();
 
+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
+            RECORD_READER,
+            RECORD_SINK,
+            INCLUDE_ZERO_RECORD_RESULTS
+    );
+
     // Relationships
     static final Relationship REL_SUCCESS = new Relationship.Builder()
         .name("success")
@@ -95,33 +98,22 @@ public class PutRecord extends AbstractProcessor {
         .description("A FlowFile is routed to this relationship if the records could not be transmitted and retrying the operation will also fail")
         .build();
 
-    private static final List<PropertyDescriptor> properties;
-    private static final Set<Relationship> relationships;
+    private static final Set<Relationship> RELATIONSHIPS = Set.of(
+            REL_SUCCESS,
+            REL_FAILURE,
+            REL_RETRY
+    );
 
     private volatile RecordSinkService recordSinkService;
 
-    static {
-        final List<PropertyDescriptor> props = new ArrayList<>();
-        props.add(RECORD_READER);
-        props.add(RECORD_SINK);
-        props.add(INCLUDE_ZERO_RECORD_RESULTS);
-        properties = Collections.unmodifiableList(props);
-
-        final Set<Relationship> r = new HashSet<>();
-        r.add(REL_SUCCESS);
-        r.add(REL_FAILURE);
-        r.add(REL_RETRY);
-        relationships = Collections.unmodifiableSet(r);
-    }
-
     @Override
     public Set<Relationship> getRelationships() {
-        return relationships;
+        return RELATIONSHIPS;
     }
 
     @Override
     protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-        return properties;
+        return PROPERTIES;
     }
 
     @OnScheduled

@@ -16,11 +16,6 @@
  */
 package org.apache.nifi.processors.standard;
 
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-
 import org.apache.nifi.annotation.behavior.InputRequirement;
 import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.SupportsBatching;
@@ -31,12 +26,15 @@ import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.ValidationContext;
 import org.apache.nifi.components.ValidationResult;
 import org.apache.nifi.processor.ProcessContext;
-import org.apache.nifi.processor.ProcessorInitializationContext;
+import org.apache.nifi.processor.util.file.transfer.FileTransfer;
 import org.apache.nifi.processor.util.file.transfer.PutFileTransfer;
 import org.apache.nifi.processors.standard.util.FTPTransfer;
-import org.apache.nifi.processor.util.file.transfer.FileTransfer;
 import org.apache.nifi.processors.standard.util.SFTPTransfer;
 
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
 @SupportsBatching
 @InputRequirement(Requirement.INPUT_REQUIRED)
 @Tags({"remote", "copy", "egress", "put", "sftp", "archive", "files"})
@@ -44,51 +42,46 @@ import org.apache.nifi.processors.standard.util.SFTPTransfer;
 @SeeAlso(GetSFTP.class)
 public class PutSFTP extends PutFileTransfer<SFTPTransfer> {
 
-    private List<PropertyDescriptor> properties;
-
-    @Override
-    protected void init(final ProcessorInitializationContext context) {
-        final List<PropertyDescriptor> properties = new ArrayList<>();
-        properties.add(FileTransfer.HOSTNAME);
-        properties.add(SFTPTransfer.PORT);
-        properties.add(FileTransfer.USERNAME);
-        properties.add(FileTransfer.PASSWORD);
-        properties.add(SFTPTransfer.PRIVATE_KEY_PATH);
-        properties.add(SFTPTransfer.PRIVATE_KEY_PASSPHRASE);
-        properties.add(FileTransfer.REMOTE_PATH);
-        properties.add(FileTransfer.CREATE_DIRECTORY);
-        properties.add(SFTPTransfer.DISABLE_DIRECTORY_LISTING);
-        properties.add(FileTransfer.BATCH_SIZE);
-        properties.add(FileTransfer.CONNECTION_TIMEOUT);
-        properties.add(FileTransfer.DATA_TIMEOUT);
-        properties.add(FileTransfer.CONFLICT_RESOLUTION);
-        properties.add(FileTransfer.REJECT_ZERO_BYTE);
-        properties.add(FileTransfer.DOT_RENAME);
-        properties.add(FileTransfer.TEMP_FILENAME);
-        properties.add(SFTPTransfer.HOST_KEY_FILE);
-        properties.add(FileTransfer.LAST_MODIFIED_TIME);
-        properties.add(FileTransfer.PERMISSIONS);
-        properties.add(FileTransfer.REMOTE_OWNER);
-        properties.add(FileTransfer.REMOTE_GROUP);
-        properties.add(SFTPTransfer.STRICT_HOST_KEY_CHECKING);
-        properties.add(SFTPTransfer.USE_KEEPALIVE_ON_TIMEOUT);
-        properties.add(FileTransfer.USE_COMPRESSION);
-        properties.add(SFTPTransfer.PROXY_CONFIGURATION_SERVICE);
-        properties.add(FTPTransfer.PROXY_TYPE);
-        properties.add(FTPTransfer.PROXY_HOST);
-        properties.add(FTPTransfer.PROXY_PORT);
-        properties.add(FTPTransfer.HTTP_PROXY_USERNAME);
-        properties.add(FTPTransfer.HTTP_PROXY_PASSWORD);
-        properties.add(SFTPTransfer.CIPHERS_ALLOWED);
-        properties.add(SFTPTransfer.KEY_ALGORITHMS_ALLOWED);
-        properties.add(SFTPTransfer.KEY_EXCHANGE_ALGORITHMS_ALLOWED);
-        properties.add(SFTPTransfer.MESSAGE_AUTHENTICATION_CODES_ALLOWED);
-        this.properties = Collections.unmodifiableList(properties);
-    }
+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
+            FileTransfer.HOSTNAME,
+            SFTPTransfer.PORT,
+            FileTransfer.USERNAME,
+            FileTransfer.PASSWORD,
+            SFTPTransfer.PRIVATE_KEY_PATH,
+            SFTPTransfer.PRIVATE_KEY_PASSPHRASE,
+            FileTransfer.REMOTE_PATH,
+            FileTransfer.CREATE_DIRECTORY,
+            SFTPTransfer.DISABLE_DIRECTORY_LISTING,
+            FileTransfer.BATCH_SIZE,
+            FileTransfer.CONNECTION_TIMEOUT,
+            FileTransfer.DATA_TIMEOUT,
+            FileTransfer.CONFLICT_RESOLUTION,
+            FileTransfer.REJECT_ZERO_BYTE,
+            FileTransfer.DOT_RENAME,
+            FileTransfer.TEMP_FILENAME,
+            SFTPTransfer.HOST_KEY_FILE,
+            FileTransfer.LAST_MODIFIED_TIME,
+            FileTransfer.PERMISSIONS,
+            FileTransfer.REMOTE_OWNER,
+            FileTransfer.REMOTE_GROUP,
+            SFTPTransfer.STRICT_HOST_KEY_CHECKING,
+            SFTPTransfer.USE_KEEPALIVE_ON_TIMEOUT,
+            FileTransfer.USE_COMPRESSION,
+            SFTPTransfer.PROXY_CONFIGURATION_SERVICE,
+            FTPTransfer.PROXY_TYPE,
+            FTPTransfer.PROXY_HOST,
+            FTPTransfer.PROXY_PORT,
+            FTPTransfer.HTTP_PROXY_USERNAME,
+            FTPTransfer.HTTP_PROXY_PASSWORD,
+            SFTPTransfer.CIPHERS_ALLOWED,
+            SFTPTransfer.KEY_ALGORITHMS_ALLOWED,
+            SFTPTransfer.KEY_EXCHANGE_ALGORITHMS_ALLOWED,
+            SFTPTransfer.MESSAGE_AUTHENTICATION_CODES_ALLOWED
+    );
 
     @Override
     protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-        return properties;
+        return PROPERTIES;
     }
 
     @Override

@@ -16,7 +16,6 @@
  */
 package org.apache.nifi.processors.standard;
 
-import java.util.Optional;
 import org.apache.nifi.annotation.behavior.InputRequirement;
 import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.behavior.ReadsAttribute;
@@ -67,9 +66,9 @@ import java.util.BitSet;
 import java.util.Collection;
 import java.util.Comparator;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Optional;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
 import java.util.function.BiFunction;
@@ -181,6 +180,17 @@ public class PutSQL extends AbstractSessionFactoryProcessor {
         .defaultValue("false")
         .build();
 
+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
+            CONNECTION_POOL,
+            SQL_STATEMENT,
+            SUPPORT_TRANSACTIONS,
+            AUTO_COMMIT,
+            TRANSACTION_TIMEOUT,
+            BATCH_SIZE,
+            OBTAIN_GENERATED_KEYS,
+            RollbackOnFailure.ROLLBACK_ON_FAILURE
+    );
+
     static final Relationship REL_SUCCESS = new Relationship.Builder()
         .name("success")
         .description("A FlowFile is routed to this relationship after the database is successfully updated")
@@ -195,6 +205,12 @@ public class PutSQL extends AbstractSessionFactoryProcessor {
             + "such as an invalid query or an integrity constraint violation")
         .build();
 
+    private static final Set<Relationship> RELATIONSHIPS = Set.of(
+            REL_SUCCESS,
+            REL_RETRY,
+            REL_FAILURE
+    );
+
     private static final String FRAGMENT_ID_ATTR = FragmentAttributes.FRAGMENT_ID.key();
     private static final String FRAGMENT_INDEX_ATTR = FragmentAttributes.FRAGMENT_INDEX.key();
     private static final String FRAGMENT_COUNT_ATTR = FragmentAttributes.FRAGMENT_COUNT.key();
@@ -205,16 +221,7 @@ public class PutSQL extends AbstractSessionFactoryProcessor {
 
     @Override
     protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-        final List<PropertyDescriptor> properties = new ArrayList<>();
-        properties.add(CONNECTION_POOL);
-        properties.add(SQL_STATEMENT);
-        properties.add(SUPPORT_TRANSACTIONS);
-        properties.add(AUTO_COMMIT);
-        properties.add(TRANSACTION_TIMEOUT);
-        properties.add(BATCH_SIZE);
-        properties.add(OBTAIN_GENERATED_KEYS);
-        properties.add(RollbackOnFailure.ROLLBACK_ON_FAILURE);
-        return properties;
+        return PROPERTIES;
     }
 
     @Override
@@ -247,11 +254,7 @@ public class PutSQL extends AbstractSessionFactoryProcessor {
 
     @Override
     public Set<Relationship> getRelationships() {
-        final Set<Relationship> rels = new HashSet<>();
-        rels.add(REL_SUCCESS);
-        rels.add(REL_RETRY);
-        rels.add(REL_FAILURE);
-        return rels;
+        return RELATIONSHIPS;
     }
 
     private static class FunctionContext extends RollbackOnFailure {
@@ -285,7 +288,7 @@ public class PutSQL extends AbstractSessionFactoryProcessor {
 
     private final PartialFunctions.InitConnection<FunctionContext, Connection> initConnection = (c, s, fc, ffs) -> {
         final Connection connection = c.getProperty(CONNECTION_POOL).asControllerService(DBCPService.class)
-                .getConnection(ffs == null || ffs.isEmpty() ? emptyMap() : ffs.get(0).getAttributes());
+                .getConnection(ffs == null || ffs.isEmpty() ? emptyMap() : ffs.getFirst().getAttributes());
         try {
             fc.originalAutoCommit = connection.getAutoCommit();
             final boolean autocommit = c.getProperty(AUTO_COMMIT).asBoolean();
@@ -337,7 +340,7 @@ public class PutSQL extends AbstractSessionFactoryProcessor {
 
         // Create a new PreparedStatement or reuse the one from the last group if that is the same.
         final StatementFlowFileEnclosure enclosure;
-        final StatementFlowFileEnclosure lastEnclosure = groups.isEmpty() ? null : groups.get(groups.size() - 1);
+        final StatementFlowFileEnclosure lastEnclosure = groups.isEmpty() ? null : groups.getLast();
 
         if (lastEnclosure == null || !lastEnclosure.getSql().equals(sql)) {
             enclosure = new StatementFlowFileEnclosure(sql);
@@ -366,7 +369,7 @@ public class PutSQL extends AbstractSessionFactoryProcessor {
 
         // Create a new PreparedStatement or reuse the one from the last group if that is the same.
         final StatementFlowFileEnclosure enclosure;
-        final StatementFlowFileEnclosure lastEnclosure = groups.isEmpty() ? null : groups.get(groups.size() - 1);
+        final StatementFlowFileEnclosure lastEnclosure = groups.isEmpty() ? null : groups.getLast();
 
         if (lastEnclosure == null || !lastEnclosure.getSql().equals(sql)) {
             enclosure = new StatementFlowFileEnclosure(sql);
@@ -452,7 +455,7 @@ public class PutSQL extends AbstractSessionFactoryProcessor {
         String url = "jdbc://unknown-host";
         try {
             url = conn.getMetaData().getURL();
-        } catch (final SQLException sqle) {
+        } catch (final SQLException ignored) {
         }
 
         // Emit a Provenance SEND event
@@ -679,10 +682,10 @@ public class PutSQL extends AbstractSessionFactoryProcessor {
     /**
     * Pulls a batch of FlowFiles from the incoming queues. If no FlowFiles are available, returns <code>null</code>.
     * Otherwise, a List of FlowFiles will be returned.
-    *
+    * <p>
     * If all FlowFiles pulled are not eligible to be processed, the FlowFiles will be penalized and transferred back
     * to the input queue and an empty List will be returned.
-    *
+    * <p>
    * Otherwise, if the Support Fragmented Transactions property is true, all FlowFiles that belong to the same
    * transaction will be sorted in the order that they should be evaluated.
    *
@@ -776,8 +779,7 @@ public class PutSQL extends AbstractSessionFactoryProcessor {
         session.read(flowFile, in -> StreamUtils.fillBuffer(in, buffer));
 
         // Create the PreparedStatement to use for this FlowFile.
-        final String sql = new String(buffer, StandardCharsets.UTF_8);
-        return sql;
+        return new String(buffer, StandardCharsets.UTF_8);
     }
 
     /**
@@ -1092,11 +1094,10 @@ public class PutSQL extends AbstractSessionFactoryProcessor {
             if (obj == this) {
                 return false;
             }
-            if (!(obj instanceof StatementFlowFileEnclosure)) {
+            if (!(obj instanceof StatementFlowFileEnclosure other)) {
                 return false;
             }
 
-            final StatementFlowFileEnclosure other = (StatementFlowFileEnclosure) obj;
             return sql.equals(other.sql);
         }
     }

@@ -16,19 +16,6 @@
  */
 package org.apache.nifi.processors.standard;
 
-import java.nio.charset.Charset;
-import java.time.Duration;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import java.util.concurrent.TimeUnit;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-import javax.net.ssl.SSLContext;
-
 import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.annotation.behavior.InputRequirement;
 import org.apache.nifi.annotation.behavior.TriggerWhenEmpty;
@@ -42,14 +29,13 @@ import org.apache.nifi.components.PropertyValue;
 import org.apache.nifi.components.ValidationContext;
 import org.apache.nifi.components.ValidationResult;
 import org.apache.nifi.event.transport.EventSender;
-import org.apache.nifi.event.transport.configuration.TransportProtocol;
 import org.apache.nifi.event.transport.configuration.LineEnding;
+import org.apache.nifi.event.transport.configuration.TransportProtocol;
 import org.apache.nifi.event.transport.netty.StringNettyEventSenderFactory;
 import org.apache.nifi.expression.ExpressionLanguageScope;
 import org.apache.nifi.flowfile.FlowFile;
 import org.apache.nifi.processor.ProcessContext;
 import org.apache.nifi.processor.ProcessSession;
-import org.apache.nifi.processor.ProcessorInitializationContext;
 import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
@@ -57,6 +43,17 @@ import org.apache.nifi.ssl.SSLContextService;
 import org.apache.nifi.syslog.parsers.SyslogParser;
 import org.apache.nifi.util.StopWatch;
 
+import javax.net.ssl.SSLContext;
+import java.nio.charset.Charset;
+import java.time.Duration;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
 @InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED)
 @TriggerWhenEmpty
 @Tags({"syslog", "put", "udp", "tcp", "logs"})
@@ -152,6 +149,23 @@ public class PutSyslog extends AbstractSyslogProcessor {
         .dependsOn(PROTOCOL, TCP_VALUE)
         .build();
 
+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
+            HOSTNAME,
+            PROTOCOL,
+            PORT,
+            MAX_SOCKET_SEND_BUFFER_SIZE,
+            SSL_CONTEXT_SERVICE,
+            IDLE_EXPIRATION,
+            TIMEOUT,
+            BATCH_SIZE,
+            CHARSET,
+            MSG_PRIORITY,
+            MSG_VERSION,
+            MSG_TIMESTAMP,
+            MSG_HOSTNAME,
+            MSG_BODY
+    );
+
     public static final Relationship REL_SUCCESS = new Relationship.Builder()
         .name("success")
         .description("FlowFiles that are sent successfully to Syslog are sent out this relationship.")
@@ -165,46 +179,23 @@ public class PutSyslog extends AbstractSyslogProcessor {
         .description("FlowFiles that do not form a valid Syslog message are sent out this relationship.")
         .build();
 
-    private Set<Relationship> relationships;
-    private List<PropertyDescriptor> descriptors;
+    private static final Set<Relationship> RELATIONSHIPS = Set.of(
+            REL_SUCCESS,
+            REL_FAILURE,
+            REL_INVALID
+    );
 
     private EventSender<String> eventSender;
     private String transitUri;
 
-    @Override
-    protected void init(final ProcessorInitializationContext context) {
-        final List<PropertyDescriptor> descriptors = new ArrayList<>();
-        descriptors.add(HOSTNAME);
-        descriptors.add(PROTOCOL);
-        descriptors.add(PORT);
-        descriptors.add(MAX_SOCKET_SEND_BUFFER_SIZE);
-        descriptors.add(SSL_CONTEXT_SERVICE);
-        descriptors.add(IDLE_EXPIRATION);
-        descriptors.add(TIMEOUT);
-        descriptors.add(BATCH_SIZE);
-        descriptors.add(CHARSET);
-        descriptors.add(MSG_PRIORITY);
-        descriptors.add(MSG_VERSION);
-        descriptors.add(MSG_TIMESTAMP);
-        descriptors.add(MSG_HOSTNAME);
-        descriptors.add(MSG_BODY);
-        this.descriptors = Collections.unmodifiableList(descriptors);
-
-        final Set<Relationship> relationships = new HashSet<>();
-        relationships.add(REL_SUCCESS);
-        relationships.add(REL_FAILURE);
-        relationships.add(REL_INVALID);
-        this.relationships = Collections.unmodifiableSet(relationships);
-    }
-
     @Override
     public Set<Relationship> getRelationships() {
-        return this.relationships;
+        return RELATIONSHIPS;
     }
 
     @Override
     public final List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-        return descriptors;
+        return PROPERTIES;
     }
 
     @Override

@@ -50,8 +50,6 @@ import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.nio.charset.Charset;
-import java.util.Arrays;
-import java.util.Collections;
 import java.util.List;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
@@ -74,17 +72,17 @@ public class PutTCP extends AbstractPutEventProcessor<InputStream> {
         .description("Specifies the strategy used for reading input FlowFiles and transmitting messages to the destination socket address")
         .required(true)
         .allowableValues(TransmissionStrategy.class)
-        .defaultValue(TransmissionStrategy.FLOWFILE_ORIENTED.getValue())
+        .defaultValue(TransmissionStrategy.FLOWFILE_ORIENTED)
         .build();
 
     static final PropertyDescriptor DEPENDENT_CHARSET = new PropertyDescriptor.Builder()
         .fromPropertyDescriptor(CHARSET)
-        .dependsOn(TRANSMISSION_STRATEGY, TransmissionStrategy.FLOWFILE_ORIENTED.getValue())
+        .dependsOn(TRANSMISSION_STRATEGY, TransmissionStrategy.FLOWFILE_ORIENTED)
         .build();
 
     static final PropertyDescriptor DEPENDENT_OUTGOING_MESSAGE_DELIMITER = new PropertyDescriptor.Builder()
         .fromPropertyDescriptor(OUTGOING_MESSAGE_DELIMITER)
-        .dependsOn(TRANSMISSION_STRATEGY, TransmissionStrategy.FLOWFILE_ORIENTED.getValue())
+        .dependsOn(TRANSMISSION_STRATEGY, TransmissionStrategy.FLOWFILE_ORIENTED)
         .build();
 
     static final PropertyDescriptor RECORD_READER = new PropertyDescriptor.Builder()
@@ -93,7 +91,7 @@ public class PutTCP extends AbstractPutEventProcessor<InputStream> {
         .description("Specifies the Controller Service to use for reading Records from input FlowFiles")
         .identifiesControllerService(RecordReaderFactory.class)
         .required(true)
-        .dependsOn(TRANSMISSION_STRATEGY, TransmissionStrategy.RECORD_ORIENTED.getValue())
+        .dependsOn(TRANSMISSION_STRATEGY, TransmissionStrategy.RECORD_ORIENTED)
         .build();
 
     static final PropertyDescriptor RECORD_WRITER = new PropertyDescriptor.Builder()
@@ -102,10 +100,10 @@ public class PutTCP extends AbstractPutEventProcessor<InputStream> {
         .description("Specifies the Controller Service to use for writing Records to the configured socket address")
         .identifiesControllerService(RecordSetWriterFactory.class)
         .required(true)
-        .dependsOn(TRANSMISSION_STRATEGY, TransmissionStrategy.RECORD_ORIENTED.getValue())
+        .dependsOn(TRANSMISSION_STRATEGY, TransmissionStrategy.RECORD_ORIENTED)
         .build();
 
-    private static final List<PropertyDescriptor> ADDITIONAL_PROPERTIES = Collections.unmodifiableList(Arrays.asList(
+    private static final List<PropertyDescriptor> ADDITIONAL_PROPERTIES = List.of(
            CONNECTION_PER_FLOWFILE,
            SSL_CONTEXT_SERVICE,
            TRANSMISSION_STRATEGY,
@@ -113,7 +111,7 @@ public class PutTCP extends AbstractPutEventProcessor<InputStream> {
            DEPENDENT_CHARSET,
            RECORD_READER,
            RECORD_WRITER
-    ));
+    );
 
     @Override
     protected List<PropertyDescriptor> getAdditionalProperties() {
@@ -128,7 +126,7 @@ public class PutTCP extends AbstractPutEventProcessor<InputStream> {
             return;
         }
 
-        final TransmissionStrategy transmissionStrategy = TransmissionStrategy.valueOf(context.getProperty(TRANSMISSION_STRATEGY).getValue());
+        final TransmissionStrategy transmissionStrategy = context.getProperty(TRANSMISSION_STRATEGY).asAllowableValue(TransmissionStrategy.class);
         final StopWatch stopWatch = new StopWatch(true);
         try {
             final int recordCount;

@@ -39,16 +39,13 @@ import org.apache.nifi.processors.standard.sql.SqlWriter;
 import org.apache.nifi.scheduling.SchedulingStrategy;
 import org.apache.nifi.util.db.JdbcCommon;
 
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 
 import static org.apache.nifi.util.db.JdbcProperties.NORMALIZE_NAMES_FOR_AVRO;
+import static org.apache.nifi.util.db.JdbcProperties.USE_AVRO_LOGICAL_TYPES;
 import static org.apache.nifi.util.db.JdbcProperties.VARIABLE_REGISTRY_ONLY_DEFAULT_PRECISION;
 import static org.apache.nifi.util.db.JdbcProperties.VARIABLE_REGISTRY_ONLY_DEFAULT_SCALE;
-import static org.apache.nifi.util.db.JdbcProperties.USE_AVRO_LOGICAL_TYPES;
 
 
 @TriggerSerially
@@ -90,36 +87,38 @@ import static org.apache.nifi.util.db.JdbcProperties.USE_AVRO_LOGICAL_TYPES;
 @DefaultSchedule(strategy = SchedulingStrategy.TIMER_DRIVEN, period = "1 min")
 public class QueryDatabaseTable extends AbstractQueryDatabaseTable {
 
+    static final PropertyDescriptor TABLE_NAME = new PropertyDescriptor.Builder()
+        .fromPropertyDescriptor(AbstractDatabaseFetchProcessor.TABLE_NAME)
+        .description("The name of the database table to be queried. When a custom query is used, this property is used to alias the query and appears as an attribute on the FlowFile.")
+        .build();
+
+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
+            DBCP_SERVICE,
+            DB_TYPE,
+            TABLE_NAME,
+            COLUMN_NAMES,
+            WHERE_CLAUSE,
+            SQL_QUERY,
+            MAX_VALUE_COLUMN_NAMES,
+            INITIAL_LOAD_STRATEGY,
+            QUERY_TIMEOUT,
+            FETCH_SIZE,
+            AUTO_COMMIT,
+            MAX_ROWS_PER_FLOW_FILE,
+            OUTPUT_BATCH_SIZE,
+            MAX_FRAGMENTS,
+            NORMALIZE_NAMES_FOR_AVRO,
+            TRANS_ISOLATION_LEVEL,
+            USE_AVRO_LOGICAL_TYPES,
+            VARIABLE_REGISTRY_ONLY_DEFAULT_PRECISION,
+            VARIABLE_REGISTRY_ONLY_DEFAULT_SCALE
+    );
+
+    private static final Set<Relationship> RELATIONSHIPS = Set.of(REL_SUCCESS);
+
     public QueryDatabaseTable() {
-        final Set<Relationship> r = new HashSet<>();
-        r.add(REL_SUCCESS);
-        relationships = Collections.unmodifiableSet(r);
-
-        final List<PropertyDescriptor> pds = new ArrayList<>();
-        pds.add(DBCP_SERVICE);
-        pds.add(DB_TYPE);
-        pds.add(new PropertyDescriptor.Builder()
-            .fromPropertyDescriptor(TABLE_NAME)
-            .description("The name of the database table to be queried. When a custom query is used, this property is used to alias the query and appears as an attribute on the FlowFile.")
-            .build());
-        pds.add(COLUMN_NAMES);
-        pds.add(WHERE_CLAUSE);
-        pds.add(SQL_QUERY);
-        pds.add(MAX_VALUE_COLUMN_NAMES);
-        pds.add(INITIAL_LOAD_STRATEGY);
-        pds.add(QUERY_TIMEOUT);
-        pds.add(FETCH_SIZE);
-        pds.add(AUTO_COMMIT);
-        pds.add(MAX_ROWS_PER_FLOW_FILE);
-        pds.add(OUTPUT_BATCH_SIZE);
-        pds.add(MAX_FRAGMENTS);
-        pds.add(NORMALIZE_NAMES_FOR_AVRO);
-        pds.add(TRANS_ISOLATION_LEVEL);
-        pds.add(USE_AVRO_LOGICAL_TYPES);
-        pds.add(VARIABLE_REGISTRY_ONLY_DEFAULT_PRECISION);
-        pds.add(VARIABLE_REGISTRY_ONLY_DEFAULT_SCALE);
-
-        propDescriptors = Collections.unmodifiableList(pds);
+        relationships = RELATIONSHIPS;
+        propDescriptors = PROPERTIES;
     }
 
     @Override

@@ -43,9 +43,7 @@ import org.apache.nifi.scheduling.SchedulingStrategy;
 import org.apache.nifi.serialization.RecordSetWriterFactory;
 import org.apache.nifi.util.db.JdbcCommon;
 
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 
 
@@ -170,6 +168,11 @@ import static org.apache.nifi.util.db.JdbcProperties.VARIABLE_REGISTRY_ONLY_DEFA
     )
 public class QueryDatabaseTableRecord extends AbstractQueryDatabaseTable {
 
+    public static final PropertyDescriptor TABLE_NAME = new PropertyDescriptor.Builder()
+        .fromPropertyDescriptor(AbstractDatabaseFetchProcessor.TABLE_NAME)
+        .description("The name of the database table to be queried. When a custom query is used, this property is used to alias the query and appears as an attribute on the FlowFile.")
+        .build();
+
     public static final PropertyDescriptor RECORD_WRITER_FACTORY = new PropertyDescriptor.Builder()
         .name("qdbtr-record-writer")
         .displayName("Record Writer")
@@ -188,36 +191,33 @@ public class QueryDatabaseTableRecord extends AbstractQueryDatabaseTable {
         .required(true)
         .build();
 
+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
+            DBCP_SERVICE,
+            DB_TYPE,
+            TABLE_NAME,
+            COLUMN_NAMES,
+            WHERE_CLAUSE,
+            SQL_QUERY,
+            RECORD_WRITER_FACTORY,
+            MAX_VALUE_COLUMN_NAMES,
+            INITIAL_LOAD_STRATEGY,
+            QUERY_TIMEOUT,
+            FETCH_SIZE,
+            AUTO_COMMIT,
+            MAX_ROWS_PER_FLOW_FILE,
+            OUTPUT_BATCH_SIZE,
+            MAX_FRAGMENTS,
+            NORMALIZE_NAMES,
+            USE_AVRO_LOGICAL_TYPES,
+            VARIABLE_REGISTRY_ONLY_DEFAULT_PRECISION,
+            VARIABLE_REGISTRY_ONLY_DEFAULT_SCALE
+    );
+
+    private static final Set<Relationship> RELATIONSHIPS = Set.of(REL_SUCCESS);
+
     public QueryDatabaseTableRecord() {
-        final Set<Relationship> r = new HashSet<>();
-        r.add(REL_SUCCESS);
-        relationships = Collections.unmodifiableSet(r);
-
-        final List<PropertyDescriptor> pds = new ArrayList<>();
-        pds.add(DBCP_SERVICE);
-        pds.add(DB_TYPE);
-        pds.add(new PropertyDescriptor.Builder()
-            .fromPropertyDescriptor(TABLE_NAME)
-            .description("The name of the database table to be queried. When a custom query is used, this property is used to alias the query and appears as an attribute on the FlowFile.")
-            .build());
-        pds.add(COLUMN_NAMES);
-        pds.add(WHERE_CLAUSE);
-        pds.add(SQL_QUERY);
-        pds.add(RECORD_WRITER_FACTORY);
-        pds.add(MAX_VALUE_COLUMN_NAMES);
-        pds.add(INITIAL_LOAD_STRATEGY);
-        pds.add(QUERY_TIMEOUT);
-        pds.add(FETCH_SIZE);
-        pds.add(AUTO_COMMIT);
-        pds.add(MAX_ROWS_PER_FLOW_FILE);
-        pds.add(OUTPUT_BATCH_SIZE);
-        pds.add(MAX_FRAGMENTS);
-        pds.add(NORMALIZE_NAMES);
-        pds.add(USE_AVRO_LOGICAL_TYPES);
-        pds.add(VARIABLE_REGISTRY_ONLY_DEFAULT_PRECISION);
-        pds.add(VARIABLE_REGISTRY_ONLY_DEFAULT_SCALE);
-
-        propDescriptors = Collections.unmodifiableList(pds);
+        relationships = RELATIONSHIPS;
+        propDescriptors = PROPERTIES;
     }
 
     @Override

@@ -228,6 +228,15 @@ public class QueryRecord extends AbstractProcessor {
         .required(true)
         .build();
 
+    private static final List<PropertyDescriptor> PROPERTIES = List.of(
+            RECORD_READER_FACTORY,
+            RECORD_WRITER_FACTORY,
+            INCLUDE_ZERO_RECORD_FLOWFILES,
+            CACHE_SCHEMA,
+            DEFAULT_PRECISION,
+            DEFAULT_SCALE
+    );
+
     public static final Relationship REL_ORIGINAL = new Relationship.Builder()
         .name("original")
         .description("The original FlowFile is routed to this relationship")
@@ -239,7 +248,6 @@ public class QueryRecord extends AbstractProcessor {
             + "be routed to this relationship")
         .build();
 
-    private List<PropertyDescriptor> properties;
     private final Set<Relationship> relationships = Collections.synchronizedSet(new HashSet<>());
 
     private final Cache<Tuple<String, RecordSchema>, BlockingQueue<CachedStatement>> statementQueues = Caffeine.newBuilder()
@@ -249,14 +257,6 @@ public class QueryRecord extends AbstractProcessor {
 
     @Override
     protected void init(final ProcessorInitializationContext context) {
-        this.properties = List.of(
-            RECORD_READER_FACTORY,
-            RECORD_WRITER_FACTORY,
-            INCLUDE_ZERO_RECORD_FLOWFILES,
-            CACHE_SCHEMA,
-            DEFAULT_PRECISION,
-            DEFAULT_SCALE);
-
         relationships.add(REL_FAILURE);
         relationships.add(REL_ORIGINAL);
     }
@@ -268,7 +268,7 @@ public class QueryRecord extends AbstractProcessor {
 
     @Override
     protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-        return properties;
+        return PROPERTIES;
     }
 
     @Override

@@ -17,10 +17,6 @@
 
 package org.apache.nifi.processors.standard;
 
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.InputRequirement;
 import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
@@ -45,6 +41,11 @@ import org.apache.nifi.record.path.util.RecordPathCache;
 import org.apache.nifi.record.path.validation.RecordPathValidator;
 import org.apache.nifi.serialization.record.Record;
 
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Set;
+
 
 @SideEffectFree
 @SupportsBatching
@@ -117,7 +118,7 @@ public class RemoveRecordField extends AbstractRecordProcessor {
             return validationResults;
         }
 
-        return Collections.singleton(new ValidationResult.Builder()
+        return Set.of(new ValidationResult.Builder()
             .subject("User-defined Properties")
             .valid(false)
             .explanation("at least one RecordPath must be specified")

@@ -17,12 +17,6 @@
 
 package org.apache.nifi.processors.standard;
 
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
 import org.apache.nifi.annotation.behavior.DynamicProperty;
 import org.apache.nifi.annotation.behavior.InputRequirement;
 import org.apache.nifi.annotation.behavior.SideEffectFree;
@@ -48,6 +42,14 @@ import org.apache.nifi.record.path.validation.RecordPathPropertyNameValidator;
 import org.apache.nifi.serialization.record.Record;
 import org.apache.nifi.serialization.record.util.DataTypeUtils;
 
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
 @SideEffectFree
 @SupportsBatching
 @InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED)
@@ -147,7 +149,7 @@ public class RenameRecordField extends AbstractRecordProcessor {
             return Collections.emptyList();
         }
 
-        return Collections.singleton(new ValidationResult.Builder()
+        return Set.of(new ValidationResult.Builder()
             .subject("User-defined Properties")
             .valid(false)
             .explanation("At least one RecordPath must be specified")

@@ -45,7 +45,6 @@ import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.DataUnit;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.io.StreamCallback;
@@ -63,9 +62,7 @@ import java.nio.BufferOverflowException;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -144,7 +141,7 @@ import java.util.regex.Pattern;
)
public class ReplaceText extends AbstractProcessor {

private static Pattern REPLACEMENT_NORMALIZATION_PATTERN = Pattern.compile("(\\$\\D)");
private static final Pattern REPLACEMENT_NORMALIZATION_PATTERN = Pattern.compile("(\\$\\D)");

// Constants
public static final String LINE_BY_LINE = "Line-by-Line";
@@ -193,7 +190,6 @@ public class ReplaceText extends AbstractProcessor {
"Substitute variable references (specified in ${var} form) using FlowFile attributes for looking up the replacement value by variable name. "
+ "When this strategy is chosen, both the <Search Value> and <Replacement Value> properties are ignored.");


public static final PropertyDescriptor REPLACEMENT_STRATEGY = new PropertyDescriptor.Builder()
.name("Replacement Strategy")
.description("The strategy for how and what to replace within the FlowFile's text content.")
@@ -278,7 +274,17 @@ public class ReplaceText extends AbstractProcessor {
.required(false)
.build();


private static final List<PropertyDescriptor> PROPERTIES = List.of(
REPLACEMENT_STRATEGY,
SEARCH_VALUE,
REPLACEMENT_VALUE,
PREPEND_TEXT,
APPEND_TEXT,
CHARACTER_SET,
MAX_BUFFER_SIZE,
EVALUATION_MODE,
LINE_BY_LINE_EVALUATION_MODE
);

// Relationships
public static final Relationship REL_SUCCESS = new Relationship.Builder()
@@ -291,38 +297,21 @@ public class ReplaceText extends AbstractProcessor {
.description("FlowFiles that could not be updated are routed to this relationship")
.build();

private List<PropertyDescriptor> properties;
private Set<Relationship> relationships;
private static final Set<Relationship> RELATIONSHIPS = Set.of(
REL_SUCCESS,
REL_FAILURE
);

private ReplacementStrategyExecutor replacementStrategyExecutor;

@Override
protected void init(final ProcessorInitializationContext context) {
final List<PropertyDescriptor> properties = new ArrayList<>();
properties.add(REPLACEMENT_STRATEGY);
properties.add(SEARCH_VALUE);
properties.add(REPLACEMENT_VALUE);
properties.add(PREPEND_TEXT);
properties.add(APPEND_TEXT);
properties.add(CHARACTER_SET);
properties.add(MAX_BUFFER_SIZE);
properties.add(EVALUATION_MODE);
properties.add(LINE_BY_LINE_EVALUATION_MODE);
this.properties = Collections.unmodifiableList(properties);

final Set<Relationship> relationships = new HashSet<>();
relationships.add(REL_SUCCESS);
relationships.add(REL_FAILURE);
this.relationships = Collections.unmodifiableSet(relationships);
}

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return properties;
return PROPERTIES;
}

@Override
public Set<Relationship> getRelationships() {
return relationships;
return RELATIONSHIPS;
}

@Override
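Besides the property-list conversion, the ReplaceText diff adds final to REPLACEMENT_NORMALIZATION_PATTERN. Pattern.compile is comparatively expensive and compiled Pattern objects are immutable and thread-safe, so the usual idiom is to compile once into a static final constant that every onTrigger thread can share; final also guarantees safe publication of the field. A standalone sketch of the idiom — the helper class is hypothetical, only the regex literal is taken from the diff:

import java.util.regex.Pattern;

class PatternIdiom {
    // Compile once: one shared, immutable, thread-safe Pattern instance
    // serves every caller instead of recompiling per invocation.
    private static final Pattern DOLLAR_NON_DIGIT = Pattern.compile("(\\$\\D)");

    static boolean mentionsDollarLiteral(String replacement) {
        return DOLLAR_NON_DIGIT.matcher(replacement).find();
    }

    public static void main(String[] args) {
        System.out.println(mentionsDollarLiteral("$D is a literal"));  // true
        System.out.println(mentionsDollarLiteral("$1 is a backref"));  // false
    }
}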
@@ -36,7 +36,6 @@ import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.DataUnit;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.io.StreamCallback;
@@ -54,9 +53,7 @@ import java.io.OutputStream;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -120,6 +117,15 @@ public class ReplaceTextWithMapping extends AbstractProcessor {
.defaultValue("1 MB")
.build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(
REGEX,
MATCHING_GROUP_FOR_LOOKUP_KEY,
MAPPING_FILE,
MAPPING_FILE_REFRESH_INTERVAL,
CHARACTER_SET,
MAX_BUFFER_SIZE
);

public static final Relationship REL_SUCCESS = new Relationship.Builder()
.name("success")
.description("FlowFiles that have been successfully updated are routed to this relationship, as well as FlowFiles whose content does not match the given Regular Expression")
@@ -129,10 +135,12 @@ public class ReplaceTextWithMapping extends AbstractProcessor {
.description("FlowFiles that could not be updated are routed to this relationship")
.build();

private final Pattern backReferencePattern = Pattern.compile("[^\\\\]\\$(\\d+)");
private static final Set<Relationship> RELATIONSHIPS = Set.of(
REL_SUCCESS,
REL_FAILURE
);

private List<PropertyDescriptor> properties;
private Set<Relationship> relationships;
private final Pattern backReferencePattern = Pattern.compile("[^\\\\]\\$(\\d+)");

private final ReentrantLock processorLock = new ReentrantLock();
private final AtomicLong lastModified = new AtomicLong(0L);
@@ -158,31 +166,14 @@ public class ReplaceTextWithMapping extends AbstractProcessor {
return errors;
}

@Override
protected void init(final ProcessorInitializationContext context) {
final List<PropertyDescriptor> properties = new ArrayList<>();
properties.add(REGEX);
properties.add(MATCHING_GROUP_FOR_LOOKUP_KEY);
properties.add(MAPPING_FILE);
properties.add(MAPPING_FILE_REFRESH_INTERVAL);
properties.add(CHARACTER_SET);
properties.add(MAX_BUFFER_SIZE);
this.properties = Collections.unmodifiableList(properties);

final Set<Relationship> relationships = new HashSet<>();
relationships.add(REL_SUCCESS);
relationships.add(REL_FAILURE);
this.relationships = Collections.unmodifiableSet(relationships);
}

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return properties;
return PROPERTIES;
}

@Override
public Set<Relationship> getRelationships() {
return relationships;
return RELATIONSHIPS;
}

@Override
@@ -303,7 +294,7 @@ public class ReplaceTextWithMapping extends AbstractProcessor {
}

public Map<String, String> getMapping() {
return Collections.unmodifiableMap(mapping);
return Map.copyOf(mapping);
}

public boolean isConfigured() {
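The getMapping() change in ReplaceTextWithMapping is a semantic tightening, not just a shorter spelling: Collections.unmodifiableMap returns a read-only view that still tracks later changes to the backing map, while Map.copyOf returns a detached immutable snapshot (and rejects null keys or values). A plain-Java illustration of the difference:

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

class CopyVsView {
    public static void main(String[] args) {
        Map<String, String> backing = new HashMap<>();
        backing.put("k", "v1");

        Map<String, String> view = Collections.unmodifiableMap(backing); // live read-only view
        Map<String, String> snapshot = Map.copyOf(backing);              // detached immutable copy

        backing.put("k", "v2"); // mutate the source after handing out both

        System.out.println(view.get("k"));     // v2 - the view tracks the backing map
        System.out.println(snapshot.get("k")); // v1 - the copy is frozen at copy time
    }
}

For callers of getMapping() this means the returned map no longer changes underneath them when the mapping is refreshed — assuming a refresh mutates the same backing map rather than replacing the field.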
@@ -34,15 +34,11 @@ import org.apache.nifi.logging.LogLevel;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.util.StringUtils;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

@@ -74,13 +70,6 @@ import java.util.Set;
"the 'retries_exceeded' relationship",
expressionLanguageScope = ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
public class RetryFlowFile extends AbstractProcessor {
private List<PropertyDescriptor> properties;
private Set<Relationship> relationships;
private String retryAttribute;
private Boolean penalizeRetried;
private Boolean failOnOverwrite;
private String reuseMode;
private String lastRetriedBy;

public static final PropertyDescriptor RETRY_ATTRIBUTE = new PropertyDescriptor.Builder()
.name("retry-attribute")
@@ -151,6 +140,14 @@ public class RetryFlowFile extends AbstractProcessor {
.defaultValue(FAIL_ON_REUSE.getValue())
.build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(
RETRY_ATTRIBUTE,
MAXIMUM_RETRIES,
PENALIZE_RETRIED,
FAIL_ON_OVERWRITE,
REUSE_MODE
);

public static final Relationship RETRY = new Relationship.Builder()
.name("retry")
.description("Input FlowFile has not exceeded the configured maximum retry count, pass this " +
@@ -170,26 +167,21 @@ public class RetryFlowFile extends AbstractProcessor {
.autoTerminateDefault(true)
.build();

private static final Set<Relationship> RELATIONSHIPS = Set.of(
RETRY,
RETRIES_EXCEEDED,
FAILURE
);

private String retryAttribute;
private Boolean penalizeRetried;
private Boolean failOnOverwrite;
private String reuseMode;
private String lastRetriedBy;

@Override
public Set<Relationship> getRelationships() {
return relationships;
}

@Override
protected void init(ProcessorInitializationContext context) {
List<PropertyDescriptor> props = new ArrayList<>();
props.add(RETRY_ATTRIBUTE);
props.add(MAXIMUM_RETRIES);
props.add(PENALIZE_RETRIED);
props.add(FAIL_ON_OVERWRITE);
props.add(REUSE_MODE);
this.properties = Collections.unmodifiableList(props);

Set<Relationship> rels = new HashSet<>();
rels.add(RETRY);
rels.add(RETRIES_EXCEEDED);
rels.add(FAILURE);
this.relationships = Collections.unmodifiableSet(rels);
return RELATIONSHIPS;
}

@Override
@@ -207,7 +199,7 @@ public class RetryFlowFile extends AbstractProcessor {

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return properties;
return PROPERTIES;
}

@OnScheduled
@@ -231,7 +223,7 @@ public class RetryFlowFile extends AbstractProcessor {
try {
currentRetry = (null == retryAttributeValue)
? 1
: Integer.valueOf(retryAttributeValue.trim()) + 1;
: Integer.parseInt(retryAttributeValue.trim()) + 1;
} catch (NumberFormatException ex) {
// Configured to fail if this was not a number
if (failOnOverwrite) {
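The Integer.valueOf to Integer.parseInt change in RetryFlowFile is the standard fix for unnecessary boxing: valueOf returns an Integer object that is immediately auto-unboxed for the + 1 arithmetic, while parseInt produces the primitive int directly. Both throw NumberFormatException on malformed input, so the surrounding catch block behaves identically. Demonstrated in isolation:

class ParseVsValueOf {
    public static void main(String[] args) {
        String retryAttributeValue = " 3 ";

        // valueOf boxes to Integer, then auto-unboxes for the arithmetic;
        // parseInt yields the primitive int with no temporary object.
        int viaValueOf = Integer.valueOf(retryAttributeValue.trim()) + 1;
        int viaParseInt = Integer.parseInt(retryAttributeValue.trim()) + 1;

        // Same result, same NumberFormatException behavior on bad input.
        System.out.println(viaValueOf == viaParseInt); // true
    }
}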
@@ -41,12 +41,9 @@ import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.util.StandardValidators;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
@@ -195,6 +192,8 @@ public class RouteOnAttribute extends AbstractProcessor {
.defaultValue(ROUTE_PROPERTY_NAME.getValue())
.build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(ROUTE_STRATEGY);

public static final Relationship REL_NO_MATCH = new Relationship.Builder()
.name("unmatched")
.description("FlowFiles that do not match any user-define expression will be routed here")
@@ -204,8 +203,7 @@ public class RouteOnAttribute extends AbstractProcessor {
.description("FlowFiles will be routed to 'match' if one or all Expressions match, depending on the configuration of the Routing Strategy property")
.build();

private AtomicReference<Set<Relationship>> relationships = new AtomicReference<>();
private List<PropertyDescriptor> properties;
private final AtomicReference<Set<Relationship>> relationships = new AtomicReference<>(Set.of(REL_NO_MATCH));
private volatile String configuredRouteStrategy = ROUTE_STRATEGY.getDefaultValue();
private volatile Set<String> dynamicPropertyNames = new HashSet<>();

@@ -215,17 +213,6 @@ public class RouteOnAttribute extends AbstractProcessor {
*/
private volatile Map<Relationship, PropertyValue> propertyMap = new HashMap<>();

@Override
protected void init(final ProcessorInitializationContext context) {
final Set<Relationship> set = new HashSet<>();
set.add(REL_NO_MATCH);
relationships = new AtomicReference<>(set);

final List<PropertyDescriptor> properties = new ArrayList<>();
properties.add(ROUTE_STRATEGY);
this.properties = Collections.unmodifiableList(properties);
}

@Override
public Set<Relationship> getRelationships() {
return relationships.get();
@@ -233,7 +220,7 @@ public class RouteOnAttribute extends AbstractProcessor {

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return properties;
return PROPERTIES;
}

@Override
@@ -259,7 +246,7 @@ public class RouteOnAttribute extends AbstractProcessor {
newDynamicPropertyNames.add(descriptor.getName());
}

this.dynamicPropertyNames = Collections.unmodifiableSet(newDynamicPropertyNames);
this.dynamicPropertyNames = Set.copyOf(newDynamicPropertyNames);
}

// formulate the new set of Relationships
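RouteOnAttribute builds its relationship set dynamically from user-defined properties, so it keeps the set in an AtomicReference; the change above seeds that reference with Set.of(REL_NO_MATCH) at field initialization instead of inside init(...), and swaps Collections.unmodifiableSet (a view over the caller's set) for Set.copyOf (a defensive immutable copy). A reduced sketch of the dynamic-relationship bookkeeping, with hypothetical names standing in for the NiFi types:

import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;

class DynamicRelationships {
    static final String NO_MATCH = "unmatched";

    // Seeded at construction; no init(...) callback required.
    private final AtomicReference<Set<String>> relationships =
            new AtomicReference<>(Set.of(NO_MATCH));

    void onPropertyModified(Set<String> dynamicPropertyNames) {
        Set<String> updated = new HashSet<>(dynamicPropertyNames);
        updated.add(NO_MATCH);
        // Set.copyOf detaches the published set from the mutable builder,
        // so later edits to 'updated' cannot leak into readers.
        relationships.set(Set.copyOf(updated));
    }

    Set<String> getRelationships() {
        return relationships.get();
    }

    public static void main(String[] args) {
        DynamicRelationships d = new DynamicRelationships();
        d.onPropertyModified(Set.of("priority", "size"));
        System.out.println(d.getRelationships()); // unmatched, priority, size (unspecified order)
    }
}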
@@ -16,19 +16,6 @@
*/
package org.apache.nifi.processors.standard;

import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.regex.Pattern;
import org.apache.nifi.annotation.behavior.DynamicProperty;
import org.apache.nifi.annotation.behavior.DynamicRelationship;
import org.apache.nifi.annotation.behavior.InputRequirement;
@@ -46,12 +33,23 @@ import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.DataUnit;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.io.InputStreamCallback;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.stream.io.StreamUtils;

import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.regex.Pattern;

@SideEffectFree
@SupportsBatching
@InputRequirement(Requirement.INPUT_REQUIRED)
@@ -96,30 +94,22 @@ public class RouteOnContent extends AbstractProcessor {
.defaultValue("UTF-8")
.build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(
MATCH_REQUIREMENT,
CHARACTER_SET,
BUFFER_SIZE
);

public static final Relationship REL_NO_MATCH = new Relationship.Builder()
.name("unmatched")
.description("FlowFiles that do not match any of the user-supplied regular expressions will be routed to this relationship")
.build();

private final AtomicReference<Set<Relationship>> relationships = new AtomicReference<>();
private List<PropertyDescriptor> properties;

@Override
protected void init(final ProcessorInitializationContext context) {
final Set<Relationship> relationships = new HashSet<>();
relationships.add(REL_NO_MATCH);
this.relationships.set(Collections.unmodifiableSet(relationships));

final List<PropertyDescriptor> properties = new ArrayList<>();
properties.add(MATCH_REQUIREMENT);
properties.add(CHARACTER_SET);
properties.add(BUFFER_SIZE);
this.properties = Collections.unmodifiableList(properties);
}
private final AtomicReference<Set<Relationship>> relationships = new AtomicReference<>(Set.of(REL_NO_MATCH));

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return properties;
return PROPERTIES;
}

@Override
@@ -44,7 +44,6 @@ import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.io.InputStreamCallback;
import org.apache.nifi.processor.io.OutputStreamCallback;
@@ -131,7 +130,6 @@ public class RouteText extends AbstractProcessor {
private static final String containsRegularExpressionValue = "Contains Regular Expression";
private static final String satisfiesExpression = "Satisfies Expression";


public static final AllowableValue ROUTE_TO_MATCHING_PROPERTY_NAME = new AllowableValue(routePropertyNameValue, routePropertyNameValue,
"Lines will be routed to each relationship whose corresponding expression evaluates to 'true'");
public static final AllowableValue ROUTE_TO_MATCHED_WHEN_ALL_PROPERTIES_MATCH = new AllowableValue(routeAllMatchValue, routeAllMatchValue,
@@ -212,6 +210,15 @@ public class RouteText extends AbstractProcessor {
.defaultValue("UTF-8")
.build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(
ROUTE_STRATEGY,
MATCH_STRATEGY,
CHARACTER_SET,
TRIM_WHITESPACE,
IGNORE_CASE,
GROUPING_REGEX
);

public static final Relationship REL_ORIGINAL = new Relationship.Builder()
.name("original")
.description("The original input file will be routed to this destination when the lines have been successfully routed to 1 or more relationships")
@@ -225,10 +232,10 @@ public class RouteText extends AbstractProcessor {
.description("Data that satisfies the required user-defined rules will be routed to this Relationship")
.build();

private static Group EMPTY_GROUP = new Group(Collections.emptyList());
private static final Group EMPTY_GROUP = new Group(Collections.emptyList());

private AtomicReference<Set<Relationship>> relationships = new AtomicReference<>();
private List<PropertyDescriptor> properties;
private final AtomicReference<Set<Relationship>> relationships =
new AtomicReference<>(Set.of(REL_ORIGINAL, REL_NO_MATCH));
private volatile String configuredRouteStrategy = ROUTE_STRATEGY.getDefaultValue();
private volatile Set<String> dynamicPropertyNames = new HashSet<>();

@@ -255,23 +262,6 @@ public class RouteText extends AbstractProcessor {
r -> ignoreCase ? Pattern.compile(r, Pattern.CASE_INSENSITIVE) : Pattern.compile(r));
}

@Override
protected void init(final ProcessorInitializationContext context) {
final Set<Relationship> set = new HashSet<>();
set.add(REL_ORIGINAL);
set.add(REL_NO_MATCH);
relationships = new AtomicReference<>(set);

final List<PropertyDescriptor> properties = new ArrayList<>();
properties.add(ROUTE_STRATEGY);
properties.add(MATCH_STRATEGY);
properties.add(CHARACTER_SET);
properties.add(TRIM_WHITESPACE);
properties.add(IGNORE_CASE);
properties.add(GROUPING_REGEX);
this.properties = Collections.unmodifiableList(properties);
}

@Override
public Set<Relationship> getRelationships() {
return relationships.get();
@@ -279,7 +269,7 @@ public class RouteText extends AbstractProcessor {

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return properties;
return PROPERTIES;
}

@Override
@@ -309,7 +299,7 @@ public class RouteText extends AbstractProcessor {
newDynamicPropertyNames.add(descriptor.getName());
}

this.dynamicPropertyNames = Collections.unmodifiableSet(newDynamicPropertyNames);
this.dynamicPropertyNames = Set.copyOf(newDynamicPropertyNames);
}

// formulate the new set of Relationships
@@ -535,12 +525,11 @@ public class RouteText extends AbstractProcessor {
final Group group = flowFileEntry.getKey();
final FlowFile flowFile = flowFileEntry.getValue();

final Map<String, String> attributes = new HashMap<>(2);
attributes.put(ROUTE_ATTRIBUTE_KEY, relationship.getName());
attributes.put(GROUP_ATTRIBUTE_KEY, StringUtils.join(group.getCapturedValues(), ", "));

logger.info("Created {} from {}; routing to relationship {}", flowFile, originalFlowFile, relationship.getName());
FlowFile updatedFlowFile = session.putAllAttributes(flowFile, Map.of(
ROUTE_ATTRIBUTE_KEY, relationship.getName(),
GROUP_ATTRIBUTE_KEY, StringUtils.join(group.getCapturedValues(), ", ")
));
session.getProvenanceReporter().route(updatedFlowFile, entry.getKey());
session.transfer(updatedFlowFile, entry.getKey());
}
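The last RouteText hunk folds a three-statement HashMap build into an inline Map.of(...) literal passed straight to session.putAllAttributes. Map.of fits because the key set is tiny and fixed; it would not fit where a key or value could be null or keys could repeat, both of which Map.of rejects. A reduced sketch — the attribute key strings here are illustrative placeholders, not the processor's real constants:

import java.util.HashMap;
import java.util.Map;

class InlineAttributeMap {
    public static void main(String[] args) {
        String route = "match";
        String group = "a, b";

        // Before: three statements and a mutable map.
        Map<String, String> before = new HashMap<>(2);
        before.put("route.name", route);
        before.put("route.group", group);

        // After: one immutable expression. Throws IllegalArgumentException
        // on duplicate keys and NullPointerException on null keys/values.
        Map<String, String> after = Map.of(
                "route.name", route,
                "route.group", group);

        System.out.println(before.equals(after)); // true
    }
}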
@@ -16,19 +16,6 @@
*/
package org.apache.nifi.processors.standard;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang3.Range;
import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.SideEffectFree;
@@ -60,6 +47,18 @@ import org.apache.nifi.serialization.record.Record;
import org.apache.nifi.serialization.record.RecordSchema;
import org.apache.nifi.util.StringUtils;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

@SideEffectFree
@SupportsBatching
@Tags({"record", "sample", "reservoir", "range", "interval"})
@@ -96,7 +95,6 @@ public class SampleRecord extends AbstractProcessor {
private static final Pattern RANGE_PATTERN = Pattern.compile("^([0-9]+)?(-)?([0-9]+)?");
private static final Pattern INTERVAL_PATTERN = Pattern.compile("([0-9]+)?(-)?([0-9]+)?(?:,|$)");


static final PropertyDescriptor RECORD_READER_FACTORY = new PropertyDescriptor.Builder()
.name("record-reader")
.displayName("Record Reader")
@@ -176,6 +174,17 @@ public class SampleRecord extends AbstractProcessor {
.dependsOn(SAMPLING_STRATEGY, PROBABILISTIC_SAMPLING, RESERVOIR_SAMPLING)
.build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(
RECORD_READER_FACTORY,
RECORD_WRITER_FACTORY,
SAMPLING_STRATEGY,
SAMPLING_INTERVAL,
SAMPLING_RANGE,
SAMPLING_PROBABILITY,
RESERVOIR_SIZE,
RANDOM_SEED
);

public static final Relationship REL_ORIGINAL = new Relationship.Builder()
.name("original")
.description("The original FlowFile is routed to this relationship if sampling is successful")
@@ -192,36 +201,20 @@ public class SampleRecord extends AbstractProcessor {
+ "is not valid), the original FlowFile will be routed to this relationship")
.build();

private static final List<PropertyDescriptor> properties;
private static final Set<Relationship> relationships;

static {
final List<PropertyDescriptor> props = new ArrayList<>();
props.add(RECORD_READER_FACTORY);
props.add(RECORD_WRITER_FACTORY);
props.add(SAMPLING_STRATEGY);
props.add(SAMPLING_INTERVAL);
props.add(SAMPLING_RANGE);
props.add(SAMPLING_PROBABILITY);
props.add(RESERVOIR_SIZE);
props.add(RANDOM_SEED);
properties = Collections.unmodifiableList(props);

final Set<Relationship> r = new HashSet<>();
r.add(REL_SUCCESS);
r.add(REL_FAILURE);
r.add(REL_ORIGINAL);
relationships = Collections.unmodifiableSet(r);
}
private static final Set<Relationship> RELATIONSHIPS = Set.of(
REL_SUCCESS,
REL_FAILURE,
REL_ORIGINAL
);

@Override
public Set<Relationship> getRelationships() {
return relationships;
return RELATIONSHIPS;
}

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return properties;
return PROPERTIES;
}

@Override
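SampleRecord shows a second variant of the same cleanup: here the collections were already static final but were populated in a static initializer block, and the collection factories collapse the whole block into two field initializers. In miniature, with placeholder element values:

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

class StaticBlockVsFactory {
    // Before: a static initializer block feeding an unmodifiable wrapper.
    private static final List<String> LEGACY;
    static {
        final List<String> values = new ArrayList<>();
        values.add("success");
        values.add("failure");
        LEGACY = Collections.unmodifiableList(values);
    }

    // After: one expression, no mutable scaffolding.
    private static final List<String> MODERN = List.of("success", "failure");

    public static void main(String[] args) {
        System.out.println(LEGACY.equals(MODERN)); // true
    }
}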
@@ -32,7 +32,6 @@ import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;
@@ -44,8 +43,6 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
@@ -98,13 +95,12 @@ public class ScanAttribute extends AbstractProcessor {
.addValidator(StandardValidators.createRegexValidator(0, 1, false))
.build();

private List<PropertyDescriptor> properties;
private Set<Relationship> relationships;

private volatile Pattern dictionaryFilterPattern = null;
private volatile Pattern attributePattern = null;
private volatile Set<String> dictionaryTerms = null;
private volatile SynchronousFileWatcher fileWatcher = null;
private static final List<PropertyDescriptor> PROPERTIES = List.of(
DICTIONARY_FILE,
ATTRIBUTE_PATTERN,
MATCHING_CRITERIA,
DICTIONARY_FILTER
);

public static final Relationship REL_MATCHED = new Relationship.Builder()
.name("matched")
@@ -115,29 +111,24 @@ public class ScanAttribute extends AbstractProcessor {
.description("FlowFiles whose attributes are not found in the dictionary will be routed to this relationship")
.build();

@Override
protected void init(final ProcessorInitializationContext context) {
final List<PropertyDescriptor> properties = new ArrayList<>();
properties.add(DICTIONARY_FILE);
properties.add(ATTRIBUTE_PATTERN);
properties.add(MATCHING_CRITERIA);
properties.add(DICTIONARY_FILTER);
this.properties = Collections.unmodifiableList(properties);
private static final Set<Relationship> RELATIONSHIPS = Set.of(
REL_MATCHED,
REL_UNMATCHED
);

final Set<Relationship> relationships = new HashSet<>();
relationships.add(REL_MATCHED);
relationships.add(REL_UNMATCHED);
this.relationships = Collections.unmodifiableSet(relationships);
}
private volatile Pattern dictionaryFilterPattern = null;
private volatile Pattern attributePattern = null;
private volatile Set<String> dictionaryTerms = null;
private volatile SynchronousFileWatcher fileWatcher = null;

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return properties;
return PROPERTIES;
}

@Override
public Set<Relationship> getRelationships() {
return relationships;
return RELATIONSHIPS;
}

@OnScheduled
@@ -184,7 +175,7 @@ public class ScanAttribute extends AbstractProcessor {
}
}

return Collections.unmodifiableSet(terms);
return Set.copyOf(terms);
}

@Override
@@ -31,7 +31,6 @@ import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.io.InputStreamCallback;
@@ -52,8 +51,6 @@ import java.io.InputStreamReader;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
@@ -89,6 +86,11 @@ public class ScanContent extends AbstractProcessor {
.defaultValue(TEXT_ENCODING)
.build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(
DICTIONARY,
DICTIONARY_ENCODING
);

public static final Relationship REL_MATCH = new Relationship.Builder()
.name("matched")
.description("FlowFiles that match at least one "
@@ -100,36 +102,25 @@ public class ScanContent extends AbstractProcessor {
+ "term in the dictionary are routed to this relationship")
.build();

private static final Set<Relationship> RELATIONSHIPS = Set.of(
REL_MATCH,
REL_NO_MATCH
);

public static final Charset UTF8 = StandardCharsets.UTF_8;

private final AtomicReference<SynchronousFileWatcher> fileWatcherRef = new AtomicReference<>();
private final AtomicReference<Search<byte[]>> searchRef = new AtomicReference<>();
private final ReentrantLock dictionaryUpdateLock = new ReentrantLock();

private List<PropertyDescriptor> properties;
private Set<Relationship> relationships;

@Override
protected void init(final ProcessorInitializationContext context) {
final List<PropertyDescriptor> properties = new ArrayList<>();
properties.add(DICTIONARY);
properties.add(DICTIONARY_ENCODING);
this.properties = Collections.unmodifiableList(properties);

final Set<Relationship> relationships = new HashSet<>();
relationships.add(REL_MATCH);
relationships.add(REL_NO_MATCH);
this.relationships = Collections.unmodifiableSet(relationships);
}

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return properties;
return PROPERTIES;
}

@Override
public Set<Relationship> getRelationships() {
return relationships;
return RELATIONSHIPS;
}

@Override
@@ -263,7 +254,7 @@ public class ScanContent extends AbstractProcessor {
if (nextLine == null || nextLine.isEmpty()) {
return null;
}
return new SearchTerm<>(nextLine.getBytes("UTF-8"));
return new SearchTerm<>(nextLine.getBytes(StandardCharsets.UTF_8));
}

@Override
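The final ScanContent hunk replaces getBytes("UTF-8") with getBytes(StandardCharsets.UTF_8). The String-named overload performs a charset lookup by name and declares the checked UnsupportedEncodingException; the Charset overload skips the lookup and cannot fail, because StandardCharsets.UTF_8 is guaranteed to exist on every JVM. Demonstrated in isolation:

import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;

class CharsetLookup {
    public static void main(String[] args) throws UnsupportedEncodingException {
        String line = "término"; // non-ASCII on purpose

        byte[] byName = line.getBytes("UTF-8");                   // checked exception, name lookup
        byte[] byCharset = line.getBytes(StandardCharsets.UTF_8); // no exception, no lookup

        System.out.println(Arrays.equals(byName, byCharset)); // true
    }
}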
@@ -16,16 +16,6 @@
*/
package org.apache.nifi.processors.standard;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;

import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
import org.apache.nifi.annotation.behavior.SideEffectFree;
@@ -44,10 +34,16 @@ import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.DataUnit;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.util.StandardValidators;

import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;

@SideEffectFree
@SupportsBatching
@Tags({"segment", "split"})
@@ -82,6 +78,8 @@ public class SegmentContent extends AbstractProcessor {
.expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
.build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(SIZE);

public static final Relationship REL_SEGMENTS = new Relationship.Builder()
.name("segments")
.description("All segments will be sent to this relationship. If the file was small enough that it was not segmented, "
@@ -92,29 +90,19 @@ public class SegmentContent extends AbstractProcessor {
.description("The original FlowFile will be sent to this relationship")
.build();

private Set<Relationship> relationships;
private List<PropertyDescriptor> propertyDescriptors;

@Override
protected void init(final ProcessorInitializationContext context) {
final Set<Relationship> relationships = new HashSet<>();
relationships.add(REL_SEGMENTS);
relationships.add(REL_ORIGINAL);
this.relationships = Collections.unmodifiableSet(relationships);

final List<PropertyDescriptor> descriptors = new ArrayList<>();
descriptors.add(SIZE);
this.propertyDescriptors = Collections.unmodifiableList(descriptors);
}
private static final Set<Relationship> RELATIONSHIPS = Set.of(
REL_SEGMENTS,
REL_ORIGINAL
);

@Override
public Set<Relationship> getRelationships() {
return relationships;
return RELATIONSHIPS;
}

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return propertyDescriptors;
return PROPERTIES;
}

@Override
@@ -16,20 +16,6 @@
*/
package org.apache.nifi.processors.standard;

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicReference;
import org.apache.commons.codec.DecoderException;
import org.apache.commons.codec.binary.Hex;
import org.apache.nifi.annotation.behavior.InputRequirement;
@@ -56,13 +42,25 @@ import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.io.InputStreamCallback;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.util.NaiveSearchRingBuffer;
import org.apache.nifi.util.Tuple;

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicReference;

@SideEffectFree
@SupportsBatching
@Tags({"content", "split", "binary"})
@@ -119,6 +117,13 @@ public class SplitContent extends AbstractProcessor {
.defaultValue(TRAILING_POSITION.getValue())
.build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(
FORMAT,
BYTE_SEQUENCE,
KEEP_SEQUENCE,
BYTE_SEQUENCE_LOCATION
);

public static final Relationship REL_SPLITS = new Relationship.Builder()
.name("splits")
.description("All Splits will be routed to the splits relationship")
@@ -128,34 +133,21 @@ public class SplitContent extends AbstractProcessor {
.description("The original file")
.build();

private Set<Relationship> relationships;
private List<PropertyDescriptor> properties;
private static final Set<Relationship> RELATIONSHIPS = Set.of(
REL_SPLITS,
REL_ORIGINAL
);

private final AtomicReference<byte[]> byteSequence = new AtomicReference<>();

@Override
protected void init(final ProcessorInitializationContext context) {
final Set<Relationship> relationships = new HashSet<>();
relationships.add(REL_SPLITS);
relationships.add(REL_ORIGINAL);
this.relationships = Collections.unmodifiableSet(relationships);

final List<PropertyDescriptor> properties = new ArrayList<>();
properties.add(FORMAT);
properties.add(BYTE_SEQUENCE);
properties.add(KEEP_SEQUENCE);
properties.add(BYTE_SEQUENCE_LOCATION);
this.properties = Collections.unmodifiableList(properties);
}

@Override
public Set<Relationship> getRelationships() {
return relationships;
return RELATIONSHIPS;
}

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return properties;
return PROPERTIES;
}

@Override
@@ -42,16 +42,12 @@ import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.processor.DataUnit;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.util.StandardValidators;

import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -93,6 +89,12 @@ public class SplitJson extends AbstractJsonPathProcessor {
.required(true)
.build();

private static final List<PropertyDescriptor> PROPERTIES = List.of(
ARRAY_JSON_PATH_EXPRESSION,
NULL_VALUE_DEFAULT_REPRESENTATION,
MAX_STRING_LENGTH
);

public static final Relationship REL_ORIGINAL = new Relationship.Builder()
.name("original")
.description("The original FlowFile that was split into segments. If the FlowFile fails processing, nothing will be sent to "
@@ -108,36 +110,24 @@ public class SplitJson extends AbstractJsonPathProcessor {
+ "path does not exist), it will be routed to this relationship")
.build();

private List<PropertyDescriptor> properties;
private Set<Relationship> relationships;
private static final Set<Relationship> RELATIONSHIPS = Set.of(
REL_ORIGINAL,
REL_SPLIT,
REL_FAILURE
);

private final AtomicReference<JsonPath> JSON_PATH_REF = new AtomicReference<>();
private volatile String nullDefaultValue;
private volatile Configuration jsonPathConfiguration;

@Override
protected void init(final ProcessorInitializationContext context) {
final List<PropertyDescriptor> properties = new ArrayList<>();
properties.add(ARRAY_JSON_PATH_EXPRESSION);
properties.add(NULL_VALUE_DEFAULT_REPRESENTATION);
properties.add(MAX_STRING_LENGTH);
this.properties = Collections.unmodifiableList(properties);

final Set<Relationship> relationships = new HashSet<>();
relationships.add(REL_ORIGINAL);
relationships.add(REL_SPLIT);
relationships.add(REL_FAILURE);
this.relationships = Collections.unmodifiableSet(relationships);
}

@Override
public Set<Relationship> getRelationships() {
return relationships;
return RELATIONSHIPS;
}

@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return properties;
return PROPERTIES;
}

@Override
@@ -165,7 +155,7 @@ public class SplitJson extends AbstractJsonPathProcessor {
};

String value = validationContext.getProperty(ARRAY_JSON_PATH_EXPRESSION).getValue();
return Collections.singleton(validator.validate(ARRAY_JSON_PATH_EXPRESSION.getName(), value, validationContext));
return Set.of(validator.validate(ARRAY_JSON_PATH_EXPRESSION.getName(), value, validationContext));
}

@OnScheduled
Some files were not shown because too many files have changed in this diff.