From 1294ad1df72fef812fa2d2ba652f94008f1c114c Mon Sep 17 00:00:00 2001 From: Tsz-wo Sze Date: Thu, 16 Aug 2012 01:42:16 +0000 Subject: [PATCH] svn merge -c 1373683 from trunk for HADOOP-8700. Use enum to define the checksum constants in DataChecksum. git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1373686 13f79535-47bb-0310-9956-ffa450edef68 --- .../hadoop-common/CHANGES.txt | 3 + .../org/apache/hadoop/util/DataChecksum.java | 108 +++++++++--------- .../apache/hadoop/util/TestDataChecksum.java | 21 ++-- .../apache/hadoop/hdfs/BlockReaderLocal.java | 2 +- .../org/apache/hadoop/hdfs/DFSClient.java | 23 ++-- .../datatransfer/DataTransferProtoUtil.java | 20 +--- .../hdfs/server/datanode/BlockSender.java | 4 +- .../hadoop/hdfs/TestDataTransferProtocol.java | 2 +- .../server/datanode/SimulatedFSDataset.java | 4 +- .../server/datanode/TestBlockRecovery.java | 2 +- .../hdfs/server/datanode/TestDiskError.java | 2 +- .../datanode/TestSimulatedFSDataset.java | 4 +- .../hadoop/mapred/IFileInputStream.java | 2 +- .../hadoop/mapred/IFileOutputStream.java | 2 +- 14 files changed, 93 insertions(+), 106 deletions(-) diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt index e14550d7b5a..562c6571553 100644 --- a/hadoop-common-project/hadoop-common/CHANGES.txt +++ b/hadoop-common-project/hadoop-common/CHANGES.txt @@ -89,6 +89,9 @@ Release 2.0.1-alpha - UNRELEASED HADOOP-8278. Make sure components declare correct set of dependencies. (tomwhite) + HADOOP-8700. Use enum to define the checksum constants in DataChecksum. + (szetszwo) + BUG FIXES HADOOP-8372. NetUtils.normalizeHostName() incorrectly handles hostname diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java index 74e2be639c4..27a3c400a40 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java @@ -43,31 +43,44 @@ public class DataChecksum implements Checksum { public static final int CHECKSUM_NULL = 0; public static final int CHECKSUM_CRC32 = 1; public static final int CHECKSUM_CRC32C = 2; - - private static String[] NAMES = new String[] { - "NULL", "CRC32", "CRC32C" - }; - - private static final int CHECKSUM_NULL_SIZE = 0; - private static final int CHECKSUM_CRC32_SIZE = 4; - private static final int CHECKSUM_CRC32C_SIZE = 4; - - - public static DataChecksum newDataChecksum( int type, int bytesPerChecksum ) { + + /** The checksum types */ + public static enum Type { + NULL (CHECKSUM_NULL, 0), + CRC32 (CHECKSUM_CRC32, 4), + CRC32C(CHECKSUM_CRC32C, 4); + + public final int id; + public final int size; + + private Type(int id, int size) { + this.id = id; + this.size = size; + } + + /** @return the type corresponding to the id. 
*/ + public static Type valueOf(int id) { + if (id < 0 || id >= values().length) { + throw new IllegalArgumentException("id=" + id + + " out of range [0, " + values().length + ")"); + } + return values()[id]; + } + } + + + public static DataChecksum newDataChecksum(Type type, int bytesPerChecksum ) { if ( bytesPerChecksum <= 0 ) { return null; } switch ( type ) { - case CHECKSUM_NULL : - return new DataChecksum( CHECKSUM_NULL, new ChecksumNull(), - CHECKSUM_NULL_SIZE, bytesPerChecksum ); - case CHECKSUM_CRC32 : - return new DataChecksum( CHECKSUM_CRC32, new PureJavaCrc32(), - CHECKSUM_CRC32_SIZE, bytesPerChecksum ); - case CHECKSUM_CRC32C: - return new DataChecksum( CHECKSUM_CRC32C, new PureJavaCrc32C(), - CHECKSUM_CRC32C_SIZE, bytesPerChecksum); + case NULL : + return new DataChecksum(type, new ChecksumNull(), bytesPerChecksum ); + case CRC32 : + return new DataChecksum(type, new PureJavaCrc32(), bytesPerChecksum ); + case CRC32C: + return new DataChecksum(type, new PureJavaCrc32C(), bytesPerChecksum); default: return null; } @@ -87,7 +100,7 @@ public class DataChecksum implements Checksum { ( (bytes[offset+2] & 0xff) << 16 ) | ( (bytes[offset+3] & 0xff) << 8 ) | ( (bytes[offset+4] & 0xff) ); - return newDataChecksum( bytes[0], bytesPerChecksum ); + return newDataChecksum( Type.valueOf(bytes[0]), bytesPerChecksum ); } /** @@ -98,7 +111,7 @@ public class DataChecksum implements Checksum { throws IOException { int type = in.readByte(); int bpc = in.readInt(); - DataChecksum summer = newDataChecksum( type, bpc ); + DataChecksum summer = newDataChecksum(Type.valueOf(type), bpc ); if ( summer == null ) { throw new IOException( "Could not create DataChecksum of type " + type + " with bytesPerChecksum " + bpc ); @@ -111,13 +124,13 @@ public class DataChecksum implements Checksum { */ public void writeHeader( DataOutputStream out ) throws IOException { - out.writeByte( type ); + out.writeByte( type.id ); out.writeInt( bytesPerChecksum ); } public byte[] getHeader() { byte[] header = new byte[DataChecksum.HEADER_LEN]; - header[0] = (byte) (type & 0xff); + header[0] = (byte) (type.id & 0xff); // Writing in buffer just like DataOutput.WriteInt() header[1+0] = (byte) ((bytesPerChecksum >>> 24) & 0xff); header[1+1] = (byte) ((bytesPerChecksum >>> 16) & 0xff); @@ -133,11 +146,11 @@ public class DataChecksum implements Checksum { */ public int writeValue( DataOutputStream out, boolean reset ) throws IOException { - if ( size <= 0 ) { + if ( type.size <= 0 ) { return 0; } - if ( size == 4 ) { + if ( type.size == 4 ) { out.writeInt( (int) summer.getValue() ); } else { throw new IOException( "Unknown Checksum " + type ); @@ -147,7 +160,7 @@ public class DataChecksum implements Checksum { reset(); } - return size; + return type.size; } /** @@ -157,11 +170,11 @@ public class DataChecksum implements Checksum { */ public int writeValue( byte[] buf, int offset, boolean reset ) throws IOException { - if ( size <= 0 ) { + if ( type.size <= 0 ) { return 0; } - if ( size == 4 ) { + if ( type.size == 4 ) { int checksum = (int) summer.getValue(); buf[offset+0] = (byte) ((checksum >>> 24) & 0xff); buf[offset+1] = (byte) ((checksum >>> 16) & 0xff); @@ -175,7 +188,7 @@ public class DataChecksum implements Checksum { reset(); } - return size; + return type.size; } /** @@ -183,36 +196,33 @@ public class DataChecksum implements Checksum { * @return true if the checksum matches and false otherwise. 
*/ public boolean compare( byte buf[], int offset ) { - if ( size == 4 ) { + if ( type.size == 4 ) { int checksum = ( (buf[offset+0] & 0xff) << 24 ) | ( (buf[offset+1] & 0xff) << 16 ) | ( (buf[offset+2] & 0xff) << 8 ) | ( (buf[offset+3] & 0xff) ); return checksum == (int) summer.getValue(); } - return size == 0; + return type.size == 0; } - private final int type; - private final int size; + private final Type type; private final Checksum summer; private final int bytesPerChecksum; private int inSum = 0; - private DataChecksum( int checksumType, Checksum checksum, - int sumSize, int chunkSize ) { - type = checksumType; + private DataChecksum( Type type, Checksum checksum, int chunkSize ) { + this.type = type; summer = checksum; - size = sumSize; bytesPerChecksum = chunkSize; } // Accessors - public int getChecksumType() { + public Type getChecksumType() { return type; } public int getChecksumSize() { - return size; + return type.size; } public int getBytesPerChecksum() { return bytesPerChecksum; @@ -260,7 +270,7 @@ public class DataChecksum implements Checksum { public void verifyChunkedSums(ByteBuffer data, ByteBuffer checksums, String fileName, long basePos) throws ChecksumException { - if (size == 0) return; + if (type.size == 0) return; if (data.hasArray() && checksums.hasArray()) { verifyChunkedSums( @@ -270,7 +280,7 @@ public class DataChecksum implements Checksum { return; } if (NativeCrc32.isAvailable()) { - NativeCrc32.verifyChunkedSums(bytesPerChecksum, type, checksums, data, + NativeCrc32.verifyChunkedSums(bytesPerChecksum, type.id, checksums, data, fileName, basePos); return; } @@ -280,7 +290,7 @@ public class DataChecksum implements Checksum { checksums.mark(); try { byte[] buf = new byte[bytesPerChecksum]; - byte[] sum = new byte[size]; + byte[] sum = new byte[type.size]; while (data.remaining() > 0) { int n = Math.min(data.remaining(), bytesPerChecksum); checksums.get(sum); @@ -351,7 +361,7 @@ public class DataChecksum implements Checksum { * buffer to put the checksums. 
*/ public void calculateChunkedSums(ByteBuffer data, ByteBuffer checksums) { - if (size == 0) return; + if (type.size == 0) return; if (data.hasArray() && checksums.hasArray()) { calculateChunkedSums(data.array(), data.arrayOffset() + data.position(), data.remaining(), @@ -411,18 +421,12 @@ public class DataChecksum implements Checksum { @Override public int hashCode() { - return (this.type + 31) * this.bytesPerChecksum; + return (this.type.id + 31) * this.bytesPerChecksum; } @Override public String toString() { - String strType; - if (type < NAMES.length && type > 0) { - strType = NAMES[type]; - } else { - strType = String.valueOf(type); - } - return "DataChecksum(type=" + strType + + return "DataChecksum(type=" + type + ", chunkSize=" + bytesPerChecksum + ")"; } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDataChecksum.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDataChecksum.java index e8d1670fade..1e523da6948 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDataChecksum.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDataChecksum.java @@ -35,13 +35,13 @@ public class TestDataChecksum { private static final int DATA_TRAILER_IN_BUFFER = 3; private static final int BYTES_PER_CHUNK = 512; - private static final int CHECKSUM_TYPES[] = new int[] { - DataChecksum.CHECKSUM_CRC32, DataChecksum.CHECKSUM_CRC32C + private static final DataChecksum.Type CHECKSUM_TYPES[] = { + DataChecksum.Type.CRC32, DataChecksum.Type.CRC32C }; @Test public void testBulkOps() throws Exception { - for (int type : CHECKSUM_TYPES) { + for (DataChecksum.Type type : CHECKSUM_TYPES) { System.err.println( "---- beginning tests with checksum type " + type + "----"); DataChecksum checksum = DataChecksum.newDataChecksum( @@ -118,21 +118,20 @@ public class TestDataChecksum { @Test public void testEquality() { assertEquals( - DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32, 512), - DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32, 512)); + DataChecksum.newDataChecksum(DataChecksum.Type.CRC32, 512), + DataChecksum.newDataChecksum(DataChecksum.Type.CRC32, 512)); assertFalse( - DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32, 512).equals( - DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32, 1024))); + DataChecksum.newDataChecksum(DataChecksum.Type.CRC32, 512).equals( + DataChecksum.newDataChecksum(DataChecksum.Type.CRC32, 1024))); assertFalse( - DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32, 512).equals( - DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32C, 512))); + DataChecksum.newDataChecksum(DataChecksum.Type.CRC32, 512).equals( + DataChecksum.newDataChecksum(DataChecksum.Type.CRC32C, 512))); } @Test public void testToString() { assertEquals("DataChecksum(type=CRC32, chunkSize=512)", - DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32, 512) - .toString()); + DataChecksum.newDataChecksum(DataChecksum.Type.CRC32, 512).toString()); } private static void corruptBufferOffset(ByteBuffer buf, int offset) { diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/BlockReaderLocal.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/BlockReaderLocal.java index 7d4fb7a0610..8b1f0bdc0a6 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/BlockReaderLocal.java +++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/BlockReaderLocal.java @@ -290,7 +290,7 @@ class BlockReaderLocal implements BlockReader { long length, BlockLocalPathInfo pathinfo, FileInputStream dataIn) throws IOException { this(conf, hdfsfile, block, token, startOffset, length, pathinfo, - DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_NULL, 4), false, + DataChecksum.newDataChecksum(DataChecksum.Type.NULL, 4), false, dataIn, startOffset, null); } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java index 7351806f8c2..788f10f8c79 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java @@ -202,7 +202,7 @@ public class DFSClient implements java.io.Closeable { final int maxBlockAcquireFailures; final int confTime; final int ioBufferSize; - final int checksumType; + final DataChecksum.Type checksumType; final int bytesPerChecksum; final int writePacketSize; final int socketTimeout; @@ -273,18 +273,17 @@ public class DFSClient implements java.io.Closeable { DFS_CLIENT_USE_DN_HOSTNAME_DEFAULT); } - private int getChecksumType(Configuration conf) { - String checksum = conf.get(DFSConfigKeys.DFS_CHECKSUM_TYPE_KEY, + private DataChecksum.Type getChecksumType(Configuration conf) { + final String checksum = conf.get( + DFSConfigKeys.DFS_CHECKSUM_TYPE_KEY, DFSConfigKeys.DFS_CHECKSUM_TYPE_DEFAULT); - if ("CRC32".equals(checksum)) { - return DataChecksum.CHECKSUM_CRC32; - } else if ("CRC32C".equals(checksum)) { - return DataChecksum.CHECKSUM_CRC32C; - } else if ("NULL".equals(checksum)) { - return DataChecksum.CHECKSUM_NULL; - } else { - LOG.warn("Bad checksum type: " + checksum + ". Using default."); - return DataChecksum.CHECKSUM_CRC32C; + try { + return DataChecksum.Type.valueOf(checksum); + } catch(IllegalArgumentException iae) { + LOG.warn("Bad checksum type: " + checksum + ". Using default " + + DFSConfigKeys.DFS_CHECKSUM_TYPE_DEFAULT); + return DataChecksum.Type.valueOf( + DFSConfigKeys.DFS_CHECKSUM_TYPE_DEFAULT); } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/DataTransferProtoUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/DataTransferProtoUtil.java index 598d41e472b..0a4d20fa958 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/DataTransferProtoUtil.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/DataTransferProtoUtil.java @@ -31,9 +31,6 @@ import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.util.DataChecksum; -import com.google.common.collect.BiMap; -import com.google.common.collect.ImmutableBiMap; - /** * Static utilities for dealing with the protocol buffers used by the @@ -42,19 +39,6 @@ import com.google.common.collect.ImmutableBiMap; @InterfaceAudience.Private @InterfaceStability.Evolving public abstract class DataTransferProtoUtil { - - /** - * Map between the internal DataChecksum identifiers and the protobuf- - * generated identifiers on the wire. 
- */ - static BiMap checksumTypeMap = - ImmutableBiMap.builder() - .put(DataChecksum.CHECKSUM_CRC32, ChecksumProto.ChecksumType.CRC32) - .put(DataChecksum.CHECKSUM_CRC32C, ChecksumProto.ChecksumType.CRC32C) - .put(DataChecksum.CHECKSUM_NULL, ChecksumProto.ChecksumType.NULL) - .build(); - - static BlockConstructionStage fromProto( OpWriteBlockProto.BlockConstructionStage stage) { return BlockConstructionStage.valueOf(BlockConstructionStage.class, @@ -68,7 +52,7 @@ public abstract class DataTransferProtoUtil { } public static ChecksumProto toProto(DataChecksum checksum) { - ChecksumType type = checksumTypeMap.get(checksum.getChecksumType()); + ChecksumType type = ChecksumType.valueOf(checksum.getChecksumType().name()); if (type == null) { throw new IllegalArgumentException( "Can't convert checksum to protobuf: " + checksum); @@ -84,7 +68,7 @@ public abstract class DataTransferProtoUtil { if (proto == null) return null; int bytesPerChecksum = proto.getBytesPerChecksum(); - int type = checksumTypeMap.inverse().get(proto.getType()); + DataChecksum.Type type = DataChecksum.Type.valueOf(proto.getType().name()); return DataChecksum.newDataChecksum(type, bytesPerChecksum); } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockSender.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockSender.java index 6d38febab90..fe68c4076d8 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockSender.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockSender.java @@ -40,7 +40,6 @@ import org.apache.hadoop.hdfs.protocol.datatransfer.PacketHeader; import org.apache.hadoop.hdfs.util.DataTransferThrottler; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.LongWritable; -import org.apache.hadoop.io.ReadaheadPool; import org.apache.hadoop.io.ReadaheadPool.ReadaheadRequest; import org.apache.hadoop.io.nativeio.NativeIO; import org.apache.hadoop.net.SocketOutputStream; @@ -236,8 +235,7 @@ class BlockSender implements java.io.Closeable { } else { LOG.warn("Could not find metadata file for " + block); // This only decides the buffer size. Use BUFFER_SIZE? 
- csum = DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_NULL, - 16 * 1024); + csum = DataChecksum.newDataChecksum(DataChecksum.Type.NULL, 16 * 1024); } /* diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDataTransferProtocol.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDataTransferProtocol.java index f0ff407dc2b..92ac17afdd5 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDataTransferProtocol.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDataTransferProtocol.java @@ -75,7 +75,7 @@ public class TestDataTransferProtocol { "org.apache.hadoop.hdfs.TestDataTransferProtocol"); private static final DataChecksum DEFAULT_CHECKSUM = - DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32C, 512); + DataChecksum.newDataChecksum(DataChecksum.Type.CRC32C, 512); DatanodeID datanode; InetSocketAddress dnAddr; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java index 210ef0ee5fc..d2bf5050708 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/SimulatedFSDataset.java @@ -94,8 +94,8 @@ public class SimulatedFSDataset implements FsDatasetSpi { static final byte[] nullCrcFileData; static { - DataChecksum checksum = DataChecksum.newDataChecksum( DataChecksum. - CHECKSUM_NULL, 16*1024 ); + DataChecksum checksum = DataChecksum.newDataChecksum( + DataChecksum.Type.NULL, 16*1024 ); byte[] nullCrcHeader = checksum.getHeader(); nullCrcFileData = new byte[2 + nullCrcHeader.length]; nullCrcFileData[0] = (byte) ((BlockMetadataHeader.VERSION >>> 8) & 0xff); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBlockRecovery.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBlockRecovery.java index 1a6c9ec8c29..c62206f7e29 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBlockRecovery.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestBlockRecovery.java @@ -550,7 +550,7 @@ public class TestBlockRecovery { ReplicaOutputStreams streams = null; try { streams = replicaInfo.createStreams(true, - DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32, 512)); + DataChecksum.newDataChecksum(DataChecksum.Type.CRC32, 512)); streams.getChecksumOut().write('a'); dn.data.initReplicaRecovery(new RecoveringBlock(block, null, RECOVERY_ID+1)); try { diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDiskError.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDiskError.java index 7a83bf3408e..3c10a7ae12a 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDiskError.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDiskError.java @@ -142,7 +142,7 @@ public class TestDiskError { DataOutputStream out = new DataOutputStream(s.getOutputStream()); DataChecksum checksum = DataChecksum.newDataChecksum( - DataChecksum.CHECKSUM_CRC32, 512); + DataChecksum.Type.CRC32, 512); 
new Sender(out).writeBlock(block.getBlock(), BlockTokenSecretManager.DUMMY_TOKEN, "", new DatanodeInfo[0], null, diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestSimulatedFSDataset.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestSimulatedFSDataset.java index 1277f21fef8..e8630091f65 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestSimulatedFSDataset.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestSimulatedFSDataset.java @@ -67,7 +67,7 @@ public class TestSimulatedFSDataset { // data written ReplicaInPipelineInterface bInfo = fsdataset.createRbw(b); ReplicaOutputStreams out = bInfo.createStreams(true, - DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32, 512)); + DataChecksum.newDataChecksum(DataChecksum.Type.CRC32, 512)); try { OutputStream dataOut = out.getDataOut(); assertEquals(0, fsdataset.getLength(b)); @@ -119,7 +119,7 @@ public class TestSimulatedFSDataset { short version = metaDataInput.readShort(); assertEquals(BlockMetadataHeader.VERSION, version); DataChecksum checksum = DataChecksum.newDataChecksum(metaDataInput); - assertEquals(DataChecksum.CHECKSUM_NULL, checksum.getChecksumType()); + assertEquals(DataChecksum.Type.NULL, checksum.getChecksumType()); assertEquals(0, checksum.getChecksumSize()); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IFileInputStream.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IFileInputStream.java index b171fb0e474..02cbce3d9fd 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IFileInputStream.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IFileInputStream.java @@ -71,7 +71,7 @@ public class IFileInputStream extends InputStream { public IFileInputStream(InputStream in, long len, Configuration conf) { this.in = in; this.inFd = getFileDescriptorIfAvail(in); - sum = DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32, + sum = DataChecksum.newDataChecksum(DataChecksum.Type.CRC32, Integer.MAX_VALUE); checksumSize = sum.getChecksumSize(); length = len; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IFileOutputStream.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IFileOutputStream.java index c352ffdf68c..8f25ba7a0a9 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IFileOutputStream.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/IFileOutputStream.java @@ -49,7 +49,7 @@ public class IFileOutputStream extends FilterOutputStream { */ public IFileOutputStream(OutputStream out) { super(out); - sum = DataChecksum.newDataChecksum(DataChecksum.CHECKSUM_CRC32, + sum = DataChecksum.newDataChecksum(DataChecksum.Type.CRC32, Integer.MAX_VALUE); barray = new byte[sum.getChecksumSize()]; }
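
Notes on the patterns this patch introduces, with small standalone sketches.

The core change replaces the `CHECKSUM_*` int constants with `DataChecksum.Type`, an enum that carries both the wire id and the checksum size, plus a bounds-checked `valueOf(int)` for decoding ids read from headers. The sketch below mirrors that enum outside of Hadoop. Note that `values()[id]` works only because each id (0, 1, 2) matches its constant's declaration order, an invariant the patch relies on, and that a static `valueOf(int)` legally overloads the compiler-generated `valueOf(String)`.

```java
public class ChecksumTypeDemo {
  /** Mirrors DataChecksum.Type from the patch. */
  enum Type {
    NULL(0, 0),
    CRC32(1, 4),
    CRC32C(2, 4);

    public final int id;    // id byte written into checksum headers
    public final int size;  // checksum width in bytes

    Type(int id, int size) {
      this.id = id;
      this.size = size;
    }

    /** Decode a header id, rejecting unknown values; overloads valueOf(String). */
    public static Type valueOf(int id) {
      if (id < 0 || id >= values().length) {
        throw new IllegalArgumentException(
            "id=" + id + " out of range [0, " + values().length + ")");
      }
      // Safe only because each constant's id equals its ordinal.
      return values()[id];
    }
  }

  public static void main(String[] args) {
    Type t = Type.valueOf(1);
    System.out.println(t + " id=" + t.id + " size=" + t.size); // CRC32 id=1 size=4
  }
}
```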
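The header format on disk and on the wire is unchanged: `getHeader()`/`writeHeader()` emit one type-id byte followed by `bytesPerChecksum` as a big-endian int, and `newDataChecksum(byte[], int)` reverses it. (One pre-existing quirk the patch keeps: that method reads the type from `bytes[0]` while reading `bytesPerChecksum` at `offset`.) Below is a minimal sketch of the 5-byte layout; `HEADER_LEN` is 5 per `DataChecksum.HEADER_LEN`, and the helper names are ours, not Hadoop's.

```java
public class ChecksumHeaderDemo {
  static final int HEADER_LEN = 5;  // 1 type-id byte + 4-byte bytesPerChecksum

  /** Encode as in getHeader(): type id, then bytesPerChecksum big-endian. */
  static byte[] encode(int typeId, int bytesPerChecksum) {
    byte[] header = new byte[HEADER_LEN];
    header[0] = (byte) (typeId & 0xff);
    header[1] = (byte) ((bytesPerChecksum >>> 24) & 0xff);
    header[2] = (byte) ((bytesPerChecksum >>> 16) & 0xff);
    header[3] = (byte) ((bytesPerChecksum >>> 8) & 0xff);
    header[4] = (byte) (bytesPerChecksum & 0xff);
    return header;
  }

  /** Decode as in newDataChecksum(byte[], int) with offset 0. */
  static int[] decode(byte[] h) {
    int typeId = h[0] & 0xff;
    int bpc = ((h[1] & 0xff) << 24) | ((h[2] & 0xff) << 16)
        | ((h[3] & 0xff) << 8) | (h[4] & 0xff);
    return new int[] { typeId, bpc };
  }

  public static void main(String[] args) {
    int[] decoded = decode(encode(2, 512));  // CRC32C, 512-byte chunks
    System.out.println("typeId=" + decoded[0] + " bpc=" + decoded[1]);
  }
}
```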
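In DataTransferProtoUtil, the Guava ImmutableBiMap between the int constants and the protobuf ChecksumType disappears: since DataChecksum.Type and the generated enum now share constant names (NULL, CRC32, CRC32C), conversion is a name-based `valueOf` in each direction. One caveat: `Enum.valueOf(String)` throws IllegalArgumentException on an unknown name rather than returning null, so the surviving `if (type == null)` check in toProto is effectively unreachable. A sketch with stand-in enums (WireType stands in for the protobuf-generated ChecksumProto.ChecksumType, which is not reproduced here):

```java
public class EnumNameMappingDemo {
  enum Type { NULL, CRC32, CRC32C }      // internal, as in DataChecksum.Type
  enum WireType { NULL, CRC32, CRC32C }  // stand-in for the protobuf enum

  static WireType toProto(Type t) {
    // Fails fast with IllegalArgumentException if the names ever diverge.
    return WireType.valueOf(t.name());
  }

  static Type fromProto(WireType w) {
    return Type.valueOf(w.name());
  }

  public static void main(String[] args) {
    System.out.println(toProto(Type.CRC32C));     // CRC32C
    System.out.println(fromProto(WireType.NULL)); // NULL
  }
}
```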
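Likewise, DFSClient.getChecksumType() shrinks from an if/else chain over strings to a single `Type.valueOf(String)` with a catch clause that warns and falls back to the default (CRC32C). A simplified sketch; the default string and the configuration plumbing are stand-ins for the real DFSConfigKeys/Configuration API:

```java
public class ChecksumConfigDemo {
  enum Type { NULL, CRC32, CRC32C }

  static final String DEFAULT_CHECKSUM = "CRC32C";  // cf. DFS_CHECKSUM_TYPE_DEFAULT

  static Type parseChecksumType(String configured) {
    try {
      return Type.valueOf(configured);
    } catch (IllegalArgumentException iae) {
      // Mirrors the patch: warn and fall back to the default type.
      System.err.println("Bad checksum type: " + configured
          + ". Using default " + DEFAULT_CHECKSUM);
      return Type.valueOf(DEFAULT_CHECKSUM);
    }
  }

  public static void main(String[] args) {
    System.out.println(parseChecksumType("CRC32"));   // CRC32
    System.out.println(parseChecksumType("CRC-32"));  // falls back: CRC32C
  }
}
```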