NIFI-7949: Add tag HCFS to HDFS Processors

Signed-off-by: Pierre Villard <pierre.villard.fr@gmail.com>

This closes #4633.
Siyao Meng 2020-10-28 22:58:06 -07:00 committed by Pierre Villard
parent 1366d017b8
commit 9a3a659c44
8 changed files with 8 additions and 8 deletions
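
For reference, the change is limited to the class-level @Tags annotation on each processor. Below is a minimal, abbreviated sketch (not the full source) of how the updated annotation reads on PutHDFS, per the last hunk; the class body, base class, and unrelated annotations are omitted:

import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;

// "HCFS" (Hadoop Compatible File System) is added alongside the existing tags so the
// processor also surfaces when users search for Hadoop-compatible filesystems other than HDFS.
@InputRequirement(Requirement.INPUT_REQUIRED)
@Tags({"hadoop", "HCFS", "HDFS", "put", "copy", "filesystem"})
@CapabilityDescription("Write FlowFile data to Hadoop Distributed File System (HDFS)")
public class PutHDFS { // abbreviated skeleton; body unchanged by this commit
    // ...
}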

@@ -54,7 +54,7 @@ import java.util.regex.Pattern;
@TriggerWhenEmpty
@InputRequirement(InputRequirement.Requirement.INPUT_ALLOWED)
@Tags({"hadoop", "HDFS", "delete", "remove", "filesystem"})
@Tags({"hadoop", "HCFS", "HDFS", "delete", "remove", "filesystem"})
@CapabilityDescription("Deletes one or more files or directories from HDFS. The path can be provided as an attribute from an incoming FlowFile, "
+ "or a statically set path that is periodically removed. If this processor has an incoming connection, it"
+ "will ignore running on a periodic basis and instead rely on incoming FlowFiles to trigger a delete. "

@@ -58,7 +58,7 @@ import java.util.concurrent.TimeUnit;
@SupportsBatching
@InputRequirement(Requirement.INPUT_REQUIRED)
@Tags({"hadoop", "hdfs", "get", "ingest", "fetch", "source"})
@Tags({"hadoop", "hcfs", "hdfs", "get", "ingest", "fetch", "source"})
@CapabilityDescription("Retrieves a file from HDFS. The content of the incoming FlowFile is replaced by the content of the file in HDFS. "
+ "The file in HDFS is left intact without any changes being made to it.")
@WritesAttribute(attribute="hdfs.failure.reason", description="When a FlowFile is routed to 'failure', this attribute is added indicating why the file could "

@@ -70,7 +70,7 @@ import java.util.regex.Pattern;
@TriggerWhenEmpty
@InputRequirement(Requirement.INPUT_FORBIDDEN)
@Tags({"hadoop", "HDFS", "get", "fetch", "ingest", "source", "filesystem"})
@Tags({"hadoop", "HCFS", "HDFS", "get", "fetch", "ingest", "source", "filesystem"})
@CapabilityDescription("Fetch files from Hadoop Distributed File System (HDFS) into FlowFiles. This Processor will delete the file from HDFS after fetching it.")
@WritesAttributes({
@WritesAttribute(attribute = "filename", description = "The name of the file that was read from HDFS."),

@@ -60,7 +60,7 @@ import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.processors.hadoop.GetHDFSFileInfo.HDFSFileInfoRequest.Groupping;
@InputRequirement(Requirement.INPUT_ALLOWED)
@Tags({"hadoop", "HDFS", "get", "list", "ingest", "source", "filesystem"})
@Tags({"hadoop", "HCFS", "HDFS", "get", "list", "ingest", "source", "filesystem"})
@CapabilityDescription("Retrieves a listing of files and directories from HDFS. "
+ "This processor creates a FlowFile(s) that represents the HDFS file/dir with relevant information. "
+ "Main purpose of this processor to provide functionality similar to HDFS Client, i.e. count, du, ls, test, etc. "

@@ -51,7 +51,7 @@ import java.util.concurrent.TimeUnit;
*
*/
@TriggerWhenEmpty
@Tags({"hadoop", "HDFS", "get", "fetch", "ingest", "source", "sequence file"})
@Tags({"hadoop", "HCFS", "HDFS", "get", "fetch", "ingest", "source", "sequence file"})
@CapabilityDescription("Fetch sequence files from Hadoop Distributed File System (HDFS) into FlowFiles")
@SeeAlso(PutHDFS.class)
public class GetHDFSSequenceFile extends GetHDFS {

@@ -82,7 +82,7 @@ import java.util.regex.Pattern;
@TriggerSerially
@TriggerWhenEmpty
@InputRequirement(Requirement.INPUT_FORBIDDEN)
@Tags({"hadoop", "HDFS", "get", "list", "ingest", "source", "filesystem"})
@Tags({"hadoop", "HCFS", "HDFS", "get", "list", "ingest", "source", "filesystem"})
@CapabilityDescription("Retrieves a listing of files from HDFS. Each time a listing is performed, the files with the latest timestamp will be excluded "
+ "and picked up during the next execution of the processor. This is done to ensure that we do not miss any files, or produce duplicates, in the "
+ "cases where files with the same timestamp are written immediately before and after a single execution of the processor. For each file that is "

@@ -68,7 +68,7 @@ import java.util.regex.Pattern;
* This processor renames files on HDFS.
*/
@InputRequirement(Requirement.INPUT_ALLOWED)
@Tags({"hadoop", "HDFS", "put", "move", "filesystem", "moveHDFS"})
@Tags({"hadoop", "HCFS", "HDFS", "put", "move", "filesystem", "moveHDFS"})
@CapabilityDescription("Rename existing files or a directory of files (non-recursive) on Hadoop Distributed File System (HDFS).")
@ReadsAttribute(attribute = "filename", description = "The name of the file written to HDFS comes from the value of this attribute.")
@WritesAttributes({

@@ -75,7 +75,7 @@ import java.util.stream.Stream;
* This processor copies FlowFiles to HDFS.
*/
@InputRequirement(Requirement.INPUT_REQUIRED)
@Tags({"hadoop", "HDFS", "put", "copy", "filesystem"})
@Tags({"hadoop", "HCFS", "HDFS", "put", "copy", "filesystem"})
@CapabilityDescription("Write FlowFile data to Hadoop Distributed File System (HDFS)")
@ReadsAttribute(attribute = "filename", description = "The name of the file written to HDFS comes from the value of this attribute.")
@WritesAttributes({