HADOOP-8700. Use enum to define the checksum constants in DataChecksum.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1373683 13f79535-47bb-0310-9956-ffa450edef68
parent f661d9164d
commit 3cab01ba6e
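In short, the patch replaces DataChecksum's raw int checksum-type constants (CHECKSUM_NULL, CHECKSUM_CRC32, CHECKSUM_CRC32C) with a DataChecksum.Type enum that carries both the wire id and the checksum size, and switches callers over to it. Below is a minimal usage sketch of the new API as it appears in the diff; the standalone demo class is illustrative and not part of the patch.

import org.apache.hadoop.util.DataChecksum;

// Illustrative only: exercises the enum-based API introduced by this change.
public class ChecksumTypeDemo {
  public static void main(String[] args) {
    // Create a CRC32C checksum covering 512-byte chunks.
    DataChecksum checksum =
        DataChecksum.newDataChecksum(DataChecksum.Type.CRC32C, 512);

    // The enum bundles the wire id and the per-chunk checksum size.
    DataChecksum.Type type = checksum.getChecksumType();
    System.out.println(type + " id=" + type.id + " size=" + type.size);

    // Round-trip the wire id back to the enum constant.
    DataChecksum.Type decoded = DataChecksum.Type.valueOf(type.id);
    System.out.println("decoded=" + decoded);    // CRC32C
  }
}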
@@ -91,6 +91,9 @@ Trunk (unreleased changes)
     HADOOP-8624. ProtobufRpcEngine should log all RPCs if TRACE logging is
     enabled (todd)
 
+    HADOOP-8700. Use enum to define the checksum constants in DataChecksum.
+    (szetszwo)
+
   BUG FIXES
 
     HADOOP-8177. MBeans shouldn't try to register when it fails to create MBeanName.
@@ -43,31 +43,44 @@ public class DataChecksum implements Checksum {
   public static final int CHECKSUM_NULL    = 0;
   public static final int CHECKSUM_CRC32   = 1;
   public static final int CHECKSUM_CRC32C  = 2;
 
-  private static String[] NAMES = new String[] {
-    "NULL", "CRC32", "CRC32C"
-  };
-
-  private static final int CHECKSUM_NULL_SIZE  = 0;
-  private static final int CHECKSUM_CRC32_SIZE = 4;
-  private static final int CHECKSUM_CRC32C_SIZE = 4;
-
-
-  public static DataChecksum newDataChecksum( int type, int bytesPerChecksum ) {
+  /** The checksum types */
+  public static enum Type {
+    NULL  (CHECKSUM_NULL, 0),
+    CRC32 (CHECKSUM_CRC32, 4),
+    CRC32C(CHECKSUM_CRC32C, 4);
+
+    public final int id;
+    public final int size;
+
+    private Type(int id, int size) {
+      this.id = id;
+      this.size = size;
+    }
+
+    /** @return the type corresponding to the id. */
+    public static Type valueOf(int id) {
+      if (id < 0 || id >= values().length) {
+        throw new IllegalArgumentException("id=" + id
+            + " out of range [0, " + values().length + ")");
+      }
+      return values()[id];
+    }
+  }
+
+
+  public static DataChecksum newDataChecksum(Type type, int bytesPerChecksum ) {
     if ( bytesPerChecksum <= 0 ) {
       return null;
     }
 
     switch ( type ) {
-    case CHECKSUM_NULL :
-      return new DataChecksum( CHECKSUM_NULL, new ChecksumNull(),
-                               CHECKSUM_NULL_SIZE, bytesPerChecksum );
-    case CHECKSUM_CRC32 :
-      return new DataChecksum( CHECKSUM_CRC32, new PureJavaCrc32(),
-                               CHECKSUM_CRC32_SIZE, bytesPerChecksum );
-    case CHECKSUM_CRC32C:
-      return new DataChecksum( CHECKSUM_CRC32C, new PureJavaCrc32C(),
-                               CHECKSUM_CRC32C_SIZE, bytesPerChecksum);
+    case NULL :
+      return new DataChecksum(type, new ChecksumNull(), bytesPerChecksum );
+    case CRC32 :
+      return new DataChecksum(type, new PureJavaCrc32(), bytesPerChecksum );
+    case CRC32C:
+      return new DataChecksum(type, new PureJavaCrc32C(), bytesPerChecksum);
     default:
       return null;
     }
@@ -87,7 +100,7 @@ public class DataChecksum implements Checksum {
                            ( (bytes[offset+2] & 0xff) << 16 ) |
                            ( (bytes[offset+3] & 0xff) << 8 )  |
                            ( (bytes[offset+4] & 0xff) );
-    return newDataChecksum( bytes[0], bytesPerChecksum );
+    return newDataChecksum( Type.valueOf(bytes[0]), bytesPerChecksum );
   }
 
   /**
@@ -98,7 +111,7 @@ public class DataChecksum implements Checksum {
                                  throws IOException {
     int type = in.readByte();
     int bpc = in.readInt();
-    DataChecksum summer = newDataChecksum( type, bpc );
+    DataChecksum summer = newDataChecksum(Type.valueOf(type), bpc );
     if ( summer == null ) {
       throw new IOException( "Could not create DataChecksum of type " +
                              type + " with bytesPerChecksum " + bpc );
@@ -111,13 +124,13 @@ public class DataChecksum implements Checksum {
    */
   public void writeHeader( DataOutputStream out )
                            throws IOException {
-    out.writeByte( type );
+    out.writeByte( type.id );
     out.writeInt( bytesPerChecksum );
   }
 
   public byte[] getHeader() {
     byte[] header = new byte[DataChecksum.HEADER_LEN];
-    header[0] = (byte) (type & 0xff);
+    header[0] = (byte) (type.id & 0xff);
     // Writing in buffer just like DataOutput.WriteInt()
     header[1+0] = (byte) ((bytesPerChecksum >>> 24) & 0xff);
     header[1+1] = (byte) ((bytesPerChecksum >>> 16) & 0xff);
@@ -133,11 +146,11 @@ public class DataChecksum implements Checksum {
    */
   public int writeValue( DataOutputStream out, boolean reset )
                          throws IOException {
-    if ( size <= 0 ) {
+    if ( type.size <= 0 ) {
       return 0;
     }
 
-    if ( size == 4 ) {
+    if ( type.size == 4 ) {
       out.writeInt( (int) summer.getValue() );
     } else {
       throw new IOException( "Unknown Checksum " + type );
@@ -147,7 +160,7 @@ public class DataChecksum implements Checksum {
       reset();
     }
 
-    return size;
+    return type.size;
   }
 
   /**
@@ -157,11 +170,11 @@ public class DataChecksum implements Checksum {
    */
   public int writeValue( byte[] buf, int offset, boolean reset )
                          throws IOException {
-    if ( size <= 0 ) {
+    if ( type.size <= 0 ) {
       return 0;
     }
 
-    if ( size == 4 ) {
+    if ( type.size == 4 ) {
       int checksum = (int) summer.getValue();
       buf[offset+0] = (byte) ((checksum >>> 24) & 0xff);
       buf[offset+1] = (byte) ((checksum >>> 16) & 0xff);
@@ -175,7 +188,7 @@ public class DataChecksum implements Checksum {
       reset();
     }
 
-    return size;
+    return type.size;
   }
 
   /**
@@ -183,36 +196,33 @@ public class DataChecksum implements Checksum {
    * @return true if the checksum matches and false otherwise.
    */
   public boolean compare( byte buf[], int offset ) {
-    if ( size == 4 ) {
+    if ( type.size == 4 ) {
       int checksum = ( (buf[offset+0] & 0xff) << 24 ) |
                      ( (buf[offset+1] & 0xff) << 16 ) |
                      ( (buf[offset+2] & 0xff) << 8 )  |
                      ( (buf[offset+3] & 0xff) );
       return checksum == (int) summer.getValue();
     }
-    return size == 0;
+    return type.size == 0;
   }
 
-  private final int type;
-  private final int size;
+  private final Type type;
   private final Checksum summer;
   private final int bytesPerChecksum;
   private int inSum = 0;
 
-  private DataChecksum( int checksumType, Checksum checksum,
-                        int sumSize, int chunkSize ) {
-    type = checksumType;
+  private DataChecksum( Type type, Checksum checksum, int chunkSize ) {
+    this.type = type;
     summer = checksum;
-    size = sumSize;
     bytesPerChecksum = chunkSize;
   }
 
   // Accessors
-  public int getChecksumType() {
+  public Type getChecksumType() {
     return type;
   }
   public int getChecksumSize() {
-    return size;
+    return type.size;
   }
   public int getBytesPerChecksum() {
     return bytesPerChecksum;
@@ -260,7 +270,7 @@ public class DataChecksum implements Checksum {
   public void verifyChunkedSums(ByteBuffer data, ByteBuffer checksums,
       String fileName, long basePos)
   throws ChecksumException {
-    if (size == 0) return;
+    if (type.size == 0) return;
 
     if (data.hasArray() && checksums.hasArray()) {
       verifyChunkedSums(
@@ -270,7 +280,7 @@ public class DataChecksum implements Checksum {
       return;
     }
     if (NativeCrc32.isAvailable()) {
-      NativeCrc32.verifyChunkedSums(bytesPerChecksum, type, checksums, data,
+      NativeCrc32.verifyChunkedSums(bytesPerChecksum, type.id, checksums, data,
           fileName, basePos);
       return;
     }
@@ -280,7 +290,7 @@ public class DataChecksum implements Checksum {
     checksums.mark();
     try {
       byte[] buf = new byte[bytesPerChecksum];
-      byte[] sum = new byte[size];
+      byte[] sum = new byte[type.size];
       while (data.remaining() > 0) {
         int n = Math.min(data.remaining(), bytesPerChecksum);
         checksums.get(sum);
@@ -351,7 +361,7 @@ public class DataChecksum implements Checksum {
    * buffer to put the checksums.
    */
   public void calculateChunkedSums(ByteBuffer data, ByteBuffer checksums) {
-    if (size == 0) return;
+    if (type.size == 0) return;
 
     if (data.hasArray() && checksums.hasArray()) {
       calculateChunkedSums(data.array(), data.arrayOffset() + data.position(), data.remaining(),
@@ -411,18 +421,12 @@ public class DataChecksum implements Checksum {
 
   @Override
   public int hashCode() {
-    return (this.type + 31) * this.bytesPerChecksum;
+    return (this.type.id + 31) * this.bytesPerChecksum;
   }
 
   @Override
   public String toString() {
-    String strType;
-    if (type < NAMES.length && type > 0) {
-      strType = NAMES[type];
-    } else {
-      strType = String.valueOf(type);
-    }
-    return "DataChecksum(type=" + strType +
+    return "DataChecksum(type=" + type +
         ", chunkSize=" + bytesPerChecksum + ")";
   }
 
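The writeHeader/getHeader hunks above keep the checksum header format itself unchanged: one byte for the checksum type id followed by a 4-byte big-endian bytesPerChecksum (HEADER_LEN, 5 bytes, as implied by the offsets used there). A small standalone sketch of that layout follows; the encode/decode helpers are hypothetical illustrations, only Type.valueOf, newDataChecksum, and the byte layout come from the diff.

import org.apache.hadoop.util.DataChecksum;

// Illustrative helpers mirroring the 5-byte header produced by getHeader()/writeHeader().
public class ChecksumHeaderSketch {
  static byte[] encode(DataChecksum.Type type, int bytesPerChecksum) {
    byte[] header = new byte[5];                 // 1 byte id + 4 byte chunk size
    header[0] = (byte) (type.id & 0xff);
    header[1] = (byte) ((bytesPerChecksum >>> 24) & 0xff);
    header[2] = (byte) ((bytesPerChecksum >>> 16) & 0xff);
    header[3] = (byte) ((bytesPerChecksum >>> 8) & 0xff);
    header[4] = (byte) (bytesPerChecksum & 0xff);
    return header;
  }

  static DataChecksum decode(byte[] header) {
    // Map the wire id back to the enum, then rebuild the checksum object.
    DataChecksum.Type type = DataChecksum.Type.valueOf(header[0]);
    int bpc = ((header[1] & 0xff) << 24) | ((header[2] & 0xff) << 16)
        | ((header[3] & 0xff) << 8) | (header[4] & 0xff);
    return DataChecksum.newDataChecksum(type, bpc);
  }

  public static void main(String[] args) {
    byte[] h = encode(DataChecksum.Type.CRC32, 512);
    System.out.println(decode(h));   // DataChecksum(type=CRC32, chunkSize=512)
  }
}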
@@ -35,13 +35,13 @@ public class TestDataChecksum {
   private static final int DATA_TRAILER_IN_BUFFER = 3;
 
   private static final int BYTES_PER_CHUNK = 512;
-  private static final int CHECKSUM_TYPES[] = new int[] {
-    DataChecksum.CHECKSUM_CRC32, DataChecksum.CHECKSUM_CRC32C
+  private static final DataChecksum.Type CHECKSUM_TYPES[] = {
+    DataChecksum.Type.CRC32, DataChecksum.Type.CRC32C
   };
 
   @Test
   public void testBulkOps() throws Exception {
-    for (int type : CHECKSUM_TYPES) {
+    for (DataChecksum.Type type : CHECKSUM_TYPES) {
       System.err.println(
           "---- beginning tests with checksum type " + type + "----");
       DataChecksum checksum = DataChecksum.newDataChecksum(
@@ -118,21 +118,20 @@ public class TestDataChecksum {
   @Test
   public void testEquality() {
     assertEquals(
-        DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32, 512),
-        DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32, 512));
+        DataChecksum.newDataChecksum(DataChecksum.Type.CRC32, 512),
+        DataChecksum.newDataChecksum(DataChecksum.Type.CRC32, 512));
     assertFalse(
-        DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32, 512).equals(
-            DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32, 1024)));
+        DataChecksum.newDataChecksum(DataChecksum.Type.CRC32, 512).equals(
+            DataChecksum.newDataChecksum(DataChecksum.Type.CRC32, 1024)));
     assertFalse(
-        DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32, 512).equals(
-            DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32C, 512)));
+        DataChecksum.newDataChecksum(DataChecksum.Type.CRC32, 512).equals(
+            DataChecksum.newDataChecksum(DataChecksum.Type.CRC32C, 512)));
   }
 
   @Test
   public void testToString() {
     assertEquals("DataChecksum(type=CRC32, chunkSize=512)",
-        DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32, 512)
-            .toString());
+        DataChecksum.newDataChecksum(DataChecksum.Type.CRC32, 512).toString());
   }
 
   private static void corruptBufferOffset(ByteBuffer buf, int offset) {
@@ -290,7 +290,7 @@ class BlockReaderLocal implements BlockReader {
       long length, BlockLocalPathInfo pathinfo, FileInputStream dataIn)
       throws IOException {
     this(conf, hdfsfile, block, token, startOffset, length, pathinfo,
-        DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_NULL, 4), false,
+        DataChecksum.newDataChecksum(DataChecksum.Type.NULL, 4), false,
         dataIn, startOffset, null);
   }
 
@@ -199,7 +199,7 @@ public class DFSClient implements java.io.Closeable {
     final int maxBlockAcquireFailures;
     final int confTime;
     final int ioBufferSize;
-    final int checksumType;
+    final DataChecksum.Type checksumType;
     final int bytesPerChecksum;
     final int writePacketSize;
     final int socketTimeout;
@@ -270,18 +270,17 @@ public class DFSClient implements java.io.Closeable {
           DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT);
     }
 
-    private int getChecksumType(Configuration conf) {
-      String checksum = conf.get(DFSConfigKeys.DFS_CHECKSUM_TYPE_KEY,
+    private DataChecksum.Type getChecksumType(Configuration conf) {
+      final String checksum = conf.get(
+          DFSConfigKeys.DFS_CHECKSUM_TYPE_KEY,
           DFSConfigKeys.DFS_CHECKSUM_TYPE_DEFAULT);
-      if ("CRC32".equals(checksum)) {
-        return DataChecksum.CHECKSUM_CRC32;
-      } else if ("CRC32C".equals(checksum)) {
-        return DataChecksum.CHECKSUM_CRC32C;
-      } else if ("NULL".equals(checksum)) {
-        return DataChecksum.CHECKSUM_NULL;
-      } else {
-        LOG.warn("Bad checksum type: " + checksum + ". Using default.");
-        return DataChecksum.CHECKSUM_CRC32C;
+      try {
+        return DataChecksum.Type.valueOf(checksum);
+      } catch(IllegalArgumentException iae) {
+        LOG.warn("Bad checksum type: " + checksum + ". Using default "
+            + DFSConfigKeys.DFS_CHECKSUM_TYPE_DEFAULT);
+        return DataChecksum.Type.valueOf(
+            DFSConfigKeys.DFS_CHECKSUM_TYPE_DEFAULT);
       }
     }
 
@@ -31,9 +31,6 @@ import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.DataChecksum;
 
-import com.google.common.collect.BiMap;
-import com.google.common.collect.ImmutableBiMap;
-
 
 /**
  * Static utilities for dealing with the protocol buffers used by the
@@ -42,19 +39,6 @@ import com.google.common.collect.ImmutableBiMap;
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
 public abstract class DataTransferProtoUtil {
-
-  /**
-   * Map between the internal DataChecksum identifiers and the protobuf-
-   * generated identifiers on the wire.
-   */
-  static BiMap<Integer, ChecksumProto.ChecksumType> checksumTypeMap =
-    ImmutableBiMap.<Integer, ChecksumProto.ChecksumType>builder()
-      .put(DataChecksum.CHECKSUM_CRC32, ChecksumProto.ChecksumType.CRC32)
-      .put(DataChecksum.CHECKSUM_CRC32C, ChecksumProto.ChecksumType.CRC32C)
-      .put(DataChecksum.CHECKSUM_NULL, ChecksumProto.ChecksumType.NULL)
-      .build();
-
-
   static BlockConstructionStage fromProto(
     OpWriteBlockProto.BlockConstructionStage stage) {
     return BlockConstructionStage.valueOf(BlockConstructionStage.class,
@@ -68,7 +52,7 @@ public abstract class DataTransferProtoUtil {
   }
 
   public static ChecksumProto toProto(DataChecksum checksum) {
-    ChecksumType type = checksumTypeMap.get(checksum.getChecksumType());
+    ChecksumType type = ChecksumType.valueOf(checksum.getChecksumType().name());
     if (type == null) {
       throw new IllegalArgumentException(
           "Can't convert checksum to protobuf: " + checksum);
@@ -84,7 +68,7 @@ public abstract class DataTransferProtoUtil {
     if (proto == null) return null;
 
     int bytesPerChecksum = proto.getBytesPerChecksum();
-    int type = checksumTypeMap.inverse().get(proto.getType());
+    DataChecksum.Type type = DataChecksum.Type.valueOf(proto.getType().name());
 
     return DataChecksum.newDataChecksum(type, bytesPerChecksum);
   }
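The DataTransferProtoUtil hunks above drop the explicit BiMap between checksum ids and the protobuf ChecksumType: since DataChecksum.Type and the generated protobuf enum share the same constant names (NULL, CRC32, CRC32C), the mapping becomes a name-based valueOf in both directions. A self-contained sketch of that pattern with two stand-in enums follows; the enum definitions below are illustrative, not the real Hadoop or protobuf classes.

// Stand-ins for DataChecksum.Type and the protobuf-generated ChecksumType.
enum LocalType { NULL, CRC32, CRC32C }
enum WireType  { NULL, CRC32, CRC32C }

public class NameBasedMappingSketch {
  // Local -> wire: replaces checksumTypeMap.get(...)
  static WireType toWire(LocalType t) {
    return WireType.valueOf(t.name());
  }

  // Wire -> local: replaces checksumTypeMap.inverse().get(...)
  static LocalType fromWire(WireType t) {
    return LocalType.valueOf(t.name());
  }

  public static void main(String[] args) {
    System.out.println(toWire(LocalType.CRC32C));   // CRC32C
    System.out.println(fromWire(WireType.NULL));    // NULL
  }
}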
@@ -40,7 +40,6 @@ import org.apache.hadoop.hdfs.protocol.datatransfer.PacketHeader;
 import org.apache.hadoop.hdfs.util.DataTransferThrottler;
 import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.ReadaheadPool;
 import org.apache.hadoop.io.ReadaheadPool.ReadaheadRequest;
 import org.apache.hadoop.io.nativeio.NativeIO;
 import org.apache.hadoop.net.SocketOutputStream;
@@ -236,8 +235,7 @@ class BlockSender implements java.io.Closeable {
       } else {
         LOG.warn("Could not find metadata file for " + block);
         // This only decides the buffer size. Use BUFFER_SIZE?
-        csum = DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_NULL,
-            16 * 1024);
+        csum = DataChecksum.newDataChecksum(DataChecksum.Type.NULL, 16 * 1024);
       }
 
       /*
@@ -75,7 +75,7 @@ public class TestDataTransferProtocol {
       "org.apache.hadoop.hdfs.TestDataTransferProtocol");
 
   private static final DataChecksum DEFAULT_CHECKSUM =
-      DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32C, 512);
+      DataChecksum.newDataChecksum(DataChecksum.Type.CRC32C, 512);
 
   DatanodeID datanode;
   InetSocketAddress dnAddr;
@@ -94,8 +94,8 @@ public class SimulatedFSDataset implements FsDatasetSpi<FsVolumeSpi> {
 
   static final byte[] nullCrcFileData;
   static {
-    DataChecksum checksum = DataChecksum.newDataChecksum( DataChecksum.
-        CHECKSUM_NULL, 16*1024 );
+    DataChecksum checksum = DataChecksum.newDataChecksum(
+        DataChecksum.Type.NULL, 16*1024 );
     byte[] nullCrcHeader = checksum.getHeader();
     nullCrcFileData = new byte[2 + nullCrcHeader.length];
     nullCrcFileData[0] = (byte) ((BlockMetadataHeader.VERSION >>> 8) & 0xff);
@@ -550,7 +550,7 @@ public class TestBlockRecovery {
     ReplicaOutputStreams streams = null;
     try {
       streams = replicaInfo.createStreams(true,
-          DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32, 512));
+          DataChecksum.newDataChecksum(DataChecksum.Type.CRC32, 512));
       streams.getChecksumOut().write('a');
       dn.data.initReplicaRecovery(new RecoveringBlock(block, null, RECOVERY_ID+1));
       try {
@@ -142,7 +142,7 @@ public class TestDiskError {
     DataOutputStream out = new DataOutputStream(s.getOutputStream());
 
     DataChecksum checksum = DataChecksum.newDataChecksum(
-        DataChecksum.CHECKSUM_CRC32, 512);
+        DataChecksum.Type.CRC32, 512);
     new Sender(out).writeBlock(block.getBlock(),
         BlockTokenSecretManager.DUMMY_TOKEN, "",
         new DatanodeInfo[0], null,
@@ -67,7 +67,7 @@ public class TestSimulatedFSDataset {
     // data written
     ReplicaInPipelineInterface bInfo = fsdataset.createRbw(b);
     ReplicaOutputStreams out = bInfo.createStreams(true,
-        DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32, 512));
+        DataChecksum.newDataChecksum(DataChecksum.Type.CRC32, 512));
     try {
       OutputStream dataOut = out.getDataOut();
       assertEquals(0, fsdataset.getLength(b));
@@ -119,7 +119,7 @@ public class TestSimulatedFSDataset {
     short version = metaDataInput.readShort();
     assertEquals(BlockMetadataHeader.VERSION, version);
     DataChecksum checksum = DataChecksum.newDataChecksum(metaDataInput);
-    assertEquals(DataChecksum.CHECKSUM_NULL, checksum.getChecksumType());
+    assertEquals(DataChecksum.Type.NULL, checksum.getChecksumType());
     assertEquals(0, checksum.getChecksumSize());
   }
 
@@ -71,7 +71,7 @@ public class IFileInputStream extends InputStream {
   public IFileInputStream(InputStream in, long len, Configuration conf) {
     this.in = in;
     this.inFd = getFileDescriptorIfAvail(in);
-    sum = DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32,
+    sum = DataChecksum.newDataChecksum(DataChecksum.Type.CRC32,
         Integer.MAX_VALUE);
     checksumSize = sum.getChecksumSize();
     length = len;
@@ -49,7 +49,7 @@ public class IFileOutputStream extends FilterOutputStream {
    */
   public IFileOutputStream(OutputStream out) {
     super(out);
-    sum = DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32,
+    sum = DataChecksum.newDataChecksum(DataChecksum.Type.CRC32,
         Integer.MAX_VALUE);
     barray = new byte[sum.getChecksumSize()];
   }