From 293a3042cd4d8f0340f6e6be3f082acac97aaa67 Mon Sep 17 00:00:00 2001
From: Pierre Villard
Date: Wed, 20 Apr 2016 12:24:39 +0200
Subject: [PATCH] NIFI-1788 Fixed listed allowable values

NIFI-1788 Added UT

NIFI-1788 modified test name

This closes #368
---
 .../processors/hadoop/CreateHadoopSequenceFile.java |  2 +-
 .../processors/hadoop/SequenceFileWriterImpl.java   |  6 ++++--
 .../hadoop/TestCreateHadoopSequenceFile.java        | 11 +++++++++++
 3 files changed, 16 insertions(+), 3 deletions(-)

diff --git a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java
index 4b8f87e383..777015208d 100644
--- a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java
+++ b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/CreateHadoopSequenceFile.java
@@ -90,7 +90,7 @@ public class CreateHadoopSequenceFile extends AbstractHadoopProcessor {
     static final PropertyDescriptor COMPRESSION_TYPE = new PropertyDescriptor.Builder()
             .name("compression type")
             .description("Type of compression to use when creating Sequence File")
-            .allowableValues(CompressionType.values())
+            .allowableValues(SequenceFile.CompressionType.values())
             .build();
 
     // Default Values.
diff --git a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/SequenceFileWriterImpl.java b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/SequenceFileWriterImpl.java
index a0d02f7346..2c586e0bb7 100644
--- a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/SequenceFileWriterImpl.java
+++ b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/main/java/org/apache/nifi/processors/hadoop/SequenceFileWriterImpl.java
@@ -94,8 +94,10 @@ public class SequenceFileWriterImpl implements SequenceFileWriter {
 
         try (final FSDataOutputStream fsDataOutputStream = new FSDataOutputStream(bwos, new Statistics(""));
                 final SequenceFile.Writer writer = SequenceFile.createWriter(configuration,
-                        fsDataOutputStream, Text.class, InputStreamWritable.class, compressionType,
-                        new DefaultCodec())) {
+                        SequenceFile.Writer.stream(fsDataOutputStream),
+                        SequenceFile.Writer.keyClass(Text.class),
+                        SequenceFile.Writer.valueClass(InputStreamWritable.class),
+                        SequenceFile.Writer.compression(compressionType, new DefaultCodec()))) {
 
             processInputStream(in, flowFile, writer);
 
diff --git a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/test/java/org/apache/nifi/processors/hadoop/TestCreateHadoopSequenceFile.java b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/test/java/org/apache/nifi/processors/hadoop/TestCreateHadoopSequenceFile.java
index e568dfb801..8164fc042a 100644
--- a/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/test/java/org/apache/nifi/processors/hadoop/TestCreateHadoopSequenceFile.java
+++ b/nifi-nar-bundles/nifi-hadoop-bundle/nifi-hdfs-processors/src/test/java/org/apache/nifi/processors/hadoop/TestCreateHadoopSequenceFile.java
@@ -18,6 +18,8 @@ package org.apache.nifi.processors.hadoop;
 
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.Text;
+import org.apache.nifi.components.AllowableValue;
+import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.flowfile.attributes.CoreAttributes;
 import org.apache.nifi.hadoop.KerberosProperties;
 import org.apache.nifi.util.MockFlowFile;
@@ -79,6 +81,15 @@ public class TestCreateHadoopSequenceFile {
         controller.clearTransferState();
     }
 
+    @Test
+    public void validateAllowableValuesForCompressionType() {
+        PropertyDescriptor pd = CreateHadoopSequenceFile.COMPRESSION_TYPE;
+        List<AllowableValue> allowableValues = pd.getAllowableValues();
+        assertEquals("NONE", allowableValues.get(0).getValue());
+        assertEquals("RECORD", allowableValues.get(1).getValue());
+        assertEquals("BLOCK", allowableValues.get(2).getValue());
+    }
+
     @Test
     public void testSimpleCase() throws IOException {
         for (File inFile : inFiles) {
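
Note on the SequenceFileWriterImpl change above: the writer is now created through the
SequenceFile.Writer.Option overload of SequenceFile.createWriter(Configuration, Writer.Option...)
rather than the deprecated positional-argument overload. Below is a minimal, self-contained sketch
of that Option-based API outside of NiFi; the output path, key/value types, and compression choice
are illustrative assumptions, not taken from this patch.

    // Hedged sketch of the Option-based SequenceFile.createWriter API (Hadoop 2.x).
    // The path, key, and value used here are assumptions for illustration only.
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.SequenceFile;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.io.compress.DefaultCodec;

    public class SequenceFileWriteSketch {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            try (SequenceFile.Writer writer = SequenceFile.createWriter(conf,
                    SequenceFile.Writer.file(new Path("/tmp/example.seq")),   // assumed output location
                    SequenceFile.Writer.keyClass(Text.class),
                    SequenceFile.Writer.valueClass(Text.class),
                    SequenceFile.Writer.compression(
                            SequenceFile.CompressionType.BLOCK, new DefaultCodec()))) {
                // Write one key/value record; try-with-resources closes the writer.
                writer.append(new Text("key"), new Text("value"));
            }
        }
    }

Unlike this sketch, which writes to a file path, the NiFi processor streams through an existing
FSDataOutputStream via SequenceFile.Writer.stream(...), as shown in the diff.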