NIFI-3191 - HDFS Processors Should Allow Choosing LZO Compression

This closes #1802.

Signed-off-by: Bryan Bende <bbende@apache.org>
Authored by Pierre Villard on 2017-05-15 19:55:48 +02:00, committed by Bryan Bende
parent d63fa8a822
commit ba49b8427c
2 changed files with 33 additions and 8 deletions

AbstractHadoopProcessor.java

@@ -82,7 +82,7 @@ public abstract class AbstractHadoopProcessor extends AbstractProcessor {
     public static final PropertyDescriptor COMPRESSION_CODEC = new PropertyDescriptor.Builder()
             .name("Compression codec")
             .required(true)
-            .allowableValues(CompressionType.values())
+            .allowableValues(CompressionType.allowableValues())
             .defaultValue(CompressionType.NONE.toString())
             .build();
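Switching from CompressionType.values() to CompressionType.allowableValues() registers each codec option with a human-readable description instead of the bare enum name. As a rough illustration of what the property now exposes (a hypothetical snippet, not part of the commit, assuming it lives in the same package as the CompressionType enum changed below):

    import org.apache.nifi.components.AllowableValue;

    public class AllowableValuesDemo {
        public static void main(String[] args) {
            // Each enum constant is wrapped in an AllowableValue; its description
            // (e.g. the LZO entry's note about LD_LIBRARY_PATH and the external jar)
            // is what the NiFi UI shows next to the option.
            for (AllowableValue value : CompressionType.allowableValues()) {
                System.out.println(value.getValue() + " -> " + value.getDescription());
            }
        }
    }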

CompressionType.java

@@ -16,23 +16,39 @@
  */
 package org.apache.nifi.processors.hadoop;
 
+import java.util.ArrayList;
+import java.util.List;
+
 import org.apache.hadoop.io.compress.BZip2Codec;
 import org.apache.hadoop.io.compress.DefaultCodec;
 import org.apache.hadoop.io.compress.GzipCodec;
 import org.apache.hadoop.io.compress.Lz4Codec;
 import org.apache.hadoop.io.compress.SnappyCodec;
+import org.apache.nifi.components.AllowableValue;
 
 /**
  * Compression Type Enum for Hadoop related processors.
  */
 public enum CompressionType {
-    NONE,
-    DEFAULT,
-    BZIP,
-    GZIP,
-    LZ4,
-    SNAPPY,
-    AUTOMATIC;
+    NONE("No compression"),
+    DEFAULT("Default ZLIB compression"),
+    BZIP("BZIP compression"),
+    GZIP("GZIP compression"),
+    LZ4("LZ4 compression"),
+    LZO("LZO compression - it assumes LD_LIBRARY_PATH has been set and jar is available"),
+    SNAPPY("Snappy compression"),
+    AUTOMATIC("Will attempt to automatically detect the compression codec.");
+
+    private final String description;
+
+    private CompressionType(String description) {
+        this.description = description;
+    }
+
+    private String getDescription() {
+        return this.description;
+    }
 
     @Override
     public String toString() {
@@ -42,10 +58,19 @@ public enum CompressionType {
             case BZIP: return BZip2Codec.class.getName();
             case GZIP: return GzipCodec.class.getName();
             case LZ4: return Lz4Codec.class.getName();
+            case LZO: return "com.hadoop.compression.lzo.LzoCodec";
             case SNAPPY: return SnappyCodec.class.getName();
             case AUTOMATIC: return "Automatically Detected";
         }
         return null;
     }
+
+    public static AllowableValue[] allowableValues() {
+        List<AllowableValue> values = new ArrayList<AllowableValue>();
+        for (CompressionType type : CompressionType.values()) {
+            values.add(new AllowableValue(type.name(), type.name(), type.getDescription()));
+        }
+        return values.toArray(new AllowableValue[values.size()]);
+    }
 }
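Note that the LZO case returns a hard-coded class name string rather than a class literal: the hadoop-lzo codec is not a compile-time dependency of this module (presumably because of its GPL licensing), so it can only be resolved reflectively when the jar and native library are present at runtime, as the enum's description warns. A minimal sketch of that runtime resolution, assuming hadoop-lzo is installed; this snippet is illustrative and not part of the commit:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.compress.CompressionCodec;
    import org.apache.hadoop.util.ReflectionUtils;

    public class LzoCodecResolutionSketch {
        public static void main(String[] args) throws Exception {
            // CompressionType.LZO.toString() yields "com.hadoop.compression.lzo.LzoCodec";
            // Hadoop instantiates the codec reflectively, so the class only needs to be
            // on the classpath at runtime, not at compile time.
            String codecClassName = CompressionType.LZO.toString();
            Class<?> codecClass = Class.forName(codecClassName);
            CompressionCodec codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, new Configuration());
            System.out.println(codec.getDefaultExtension()); // typically ".lzo"
        }
    }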