filesVisited) throws IOException {
if (null == filesVisited) {
@@ -465,11 +464,11 @@ public class GetHDFS extends AbstractHadoopProcessor {
}
/**
- * Returns the relative path of the child that does not include the filename
- * or the root path.
- * @param root
- * @param child
- * @return
+ * Returns the relative path of the child that does not include the filename or the root path.
+ *
+ * @param root root
+ * @param child child
+ * @return the relative path of the child that does not include the filename or the root path
*/
public static String getPathDifference(final Path root, final Path child) {
final int depthDiff = child.depth() - root.depth();
@@ -492,8 +491,7 @@ public class GetHDFS extends AbstractHadoopProcessor {
}
/**
- * Holder for a snapshot in time of some processor properties that are
- * passed around.
+ * Holder for a snapshot in time of some processor properties that are passed around.
*/
protected static class ProcessorConfiguration {
diff --git a/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFSSequenceFile.java b/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFSSequenceFile.java
index 88e725b19f..22ba36bfb2 100644
--- a/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFSSequenceFile.java
+++ b/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/GetHDFSSequenceFile.java
@@ -40,19 +40,13 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
/**
- * This processor is used to pull files from HDFS. The files being pulled in
- * MUST be SequenceFile formatted files. The processor creates a flow file for
- * each key/value entry in the ingested SequenceFile. The created flow file's
- * content depends on the value of the optional configuration property FlowFile
- * Content. Currently, there are two choices: VALUE ONLY and KEY VALUE PAIR.
- * With the prior, only the SequenceFile value element is written to the flow
- * file contents. With the latter, the SequenceFile key and value are written to
- * the flow file contents as serialized objects; the format is key length (int),
- * key(String), value length(int), value(bytes). The default is VALUE ONLY.
+ * This processor is used to pull files from HDFS. The files being pulled in MUST be SequenceFile formatted files. The processor creates a flow file for each key/value entry in the ingested
+ * SequenceFile. The created flow file's content depends on the value of the optional configuration property FlowFile Content. Currently, there are two choices: VALUE ONLY and KEY VALUE PAIR. With the
+ * prior, only the SequenceFile value element is written to the flow file contents. With the latter, the SequenceFile key and value are written to the flow file contents as serialized objects; the
+ * format is key length (int), key(String), value length(int), value(bytes). The default is VALUE ONLY.
*
- * NOTE: This processor loads the entire value entry into memory. While the size
- * limit for a value entry is 2GB, this will cause memory problems if there are
- * too many concurrent tasks and the data being ingested is large.
+ * NOTE: This processor loads the entire value entry into memory. While the size limit for a value entry is 2GB, this will cause memory problems if there are too many concurrent tasks and the data
+ * being ingested is large.
*
*/
@TriggerWhenEmpty
diff --git a/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/KeyValueReader.java b/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/KeyValueReader.java
index 40ef5fa246..38f2aaed95 100644
--- a/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/KeyValueReader.java
+++ b/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/KeyValueReader.java
@@ -43,10 +43,8 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
- * This class reads a SequenceFile and generates FlowFiles, one per KeyValue
- * pair in the SequenceFile. The FlowFile name is based on the the incoming file
- * name with System nanotime appended; the FlowFile content is the key/value
- * pair serialized via Text.
+ * This class reads a SequenceFile and generates FlowFiles, one per KeyValue pair in the SequenceFile. The FlowFile name is based on the incoming file name with System nanotime appended; the
+ * FlowFile content is the key/value pair serialized via Text.
*/
public class KeyValueReader implements SequenceFileReader> {
diff --git a/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ValueReader.java b/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ValueReader.java
index e334582ec6..a6f70054d2 100644
--- a/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ValueReader.java
+++ b/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/ValueReader.java
@@ -42,9 +42,8 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
- * This class reads a SequenceFile and generates FlowFiles, one per each
- * KeyValue Pair in the SequenceFile. The FlowFile name is the key, which is
- * typically a file name but may not be; the FlowFile content is the value.
+ * This class reads a SequenceFile and generates FlowFiles, one per KeyValue Pair in the SequenceFile. The FlowFile name is the key, which is typically a file name but may not be; the FlowFile
+ * content is the value.
*
*/
public class ValueReader implements SequenceFileReader> {
diff --git a/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/util/ByteFilteringOutputStream.java b/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/util/ByteFilteringOutputStream.java
index 5cbcab63dc..58a30f5946 100644
--- a/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/util/ByteFilteringOutputStream.java
+++ b/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/util/ByteFilteringOutputStream.java
@@ -24,10 +24,8 @@ import java.util.Arrays;
import java.util.List;
/**
- * This class allows the user to define byte-array filters or single-byte
- * filters that will modify the content that is written to the underlying
- * stream. Each filter can be given a maximum number of replacements that it
- * should perform.
+ * This class allows the user to define byte-array filters or single-byte filters that will modify the content that is written to the underlying stream. Each filter can be given a maximum number of
+ * replacements that it should perform.
*/
public class ByteFilteringOutputStream extends FilterOutputStream {
@@ -66,8 +64,7 @@ public class ByteFilteringOutputStream extends FilterOutputStream {
/**
* Causes this stream to write replaceWith in place of
- * toReplace if {@link #write(byte[], int, int)} is called where
- * the value to write is equal to
+ * toReplace if {@link #write(byte[], int, int)} is called where the value to write is equal to
* toReplace.
*
* @param toReplace the byte array to replace
@@ -79,14 +76,12 @@ public class ByteFilteringOutputStream extends FilterOutputStream {
/**
* Causes this stream to write replaceWith in place of
- * toReplace if {@link #write(byte[], int, int)} is called where
- * the value to write is equal to
+ * toReplace if {@link #write(byte[], int, int)} is called where the value to write is equal to
* toReplace.
*
* @param toReplace the byte array to replace
* @param replaceWith the byte array to be substituted
- * @param maxReplacements the maximum number of replacements that should be
- * made
+ * @param maxReplacements the maximum number of replacements that should be made
*/
public void addFilter(final byte[] toReplace, final byte[] replaceWith, final int maxReplacements) {
multiByteFilters.add(new Filter(toReplace, replaceWith, maxReplacements));
@@ -94,8 +89,7 @@ public class ByteFilteringOutputStream extends FilterOutputStream {
/**
* Causes this stream to write replaceWith in place of
- * toReplace if {@link #write(int)} is called where the value to
- * write is equal to
+ * toReplace if {@link #write(int)} is called where the value to write is equal to
* toReplace.
*
* @param toReplace the byte to replace
@@ -107,14 +101,12 @@ public class ByteFilteringOutputStream extends FilterOutputStream {
/**
* Causes this stream to write replaceWith in place of
- * toReplace if {@link #write(int)} is called where the value to
- * write is equal to
+ * toReplace if {@link #write(int)} is called where the value to write is equal to
* toReplace.
*
* @param toReplace the byte to replace
* @param replaceWith the byte to be substituted
- * @param maxReplacements the maximum number of replacements that should be
- * made
+ * @param maxReplacements the maximum number of replacements that should be made
*/
public void addFilter(final byte toReplace, final byte replaceWith, final int maxReplacements) {
singleByteFilters.add(new Filter(new byte[]{toReplace}, new byte[]{replaceWith}, maxReplacements));
diff --git a/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/util/InputStreamWritable.java b/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/util/InputStreamWritable.java
index ee094508c7..4cb2e8ddee 100644
--- a/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/util/InputStreamWritable.java
+++ b/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/util/InputStreamWritable.java
@@ -24,10 +24,8 @@ import java.io.InputStream;
import org.apache.hadoop.io.Writable;
/**
- * Simple implementation of {@link Writable} that writes data from an
- * InputStream. This class will throw an
- * UnsupportedOperationException if {@link #readFields(DataInput)} is
- * called.
+ * Simple implementation of {@link Writable} that writes data from an InputStream. This class will throw an
+ * UnsupportedOperationException if {@link #readFields(DataInput)} is called.
*/
public class InputStreamWritable implements Writable {
diff --git a/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/util/OutputStreamWritable.java b/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/util/OutputStreamWritable.java
index 62fdc3512e..e5f29dd40f 100644
--- a/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/util/OutputStreamWritable.java
+++ b/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/util/OutputStreamWritable.java
@@ -28,8 +28,7 @@ import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.Writable;
/**
- * This class will write to an output stream, rather than an in-memory buffer,
- * the fields being read.
+ * This class will write to an output stream, rather than an in-memory buffer, the fields being read.
*
* @author unattributed
*
diff --git a/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/util/SequenceFileWriter.java b/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/util/SequenceFileWriter.java
index 35703b1679..851afd842b 100644
--- a/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/util/SequenceFileWriter.java
+++ b/nifi/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/util/SequenceFileWriter.java
@@ -25,15 +25,13 @@ import org.apache.hadoop.io.SequenceFile.CompressionType;
public interface SequenceFileWriter {
/**
- * Creates a Sequence File by writing the given FlowFile as key/value pairs.
- * The provided FlowFile may be a package of multiple FlowFiles, or just
- * one. The keys for the Sequence File are the flow files' logical names.
- * The values are the flow files' content.
+ * Creates a Sequence File by writing the given FlowFile as key/value pairs. The provided FlowFile may be a package of multiple FlowFiles, or just one. The keys for the Sequence File are the flow
+ * files' logical names. The values are the flow files' content.
*
* @param flowFile - the FlowFile to write to the Sequence File.
- * @param session
- * @param configuration
- * @param compressionType
+ * @param session session
+ * @param configuration configuration
+ * @param compressionType compression type
* @return the written to SequenceFile flow file
*/
FlowFile writeSequenceFile(FlowFile flowFile, ProcessSession session, Configuration configuration, CompressionType compressionType);