svn merge -c 1309114 from trunk for HDFS-3176. Use MD5MD5CRC32FileChecksum.readFields() in JsonUtil.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1309116 13f79535-47bb-0310-9956-ffa450edef68
commit 40a8dfa3c6
parent 18a6e47b98
@@ -627,6 +627,9 @@ Release 0.23.3 - UNRELEASED
 
     HDFS-3166. Add timeout to Hftp connections. (Daryn Sharp via szetszwo)
 
+    HDFS-3176. Use MD5MD5CRC32FileChecksum.readFields() in JsonUtil. (Kihwal
+    Lee via szetszwo)
+
 Release 0.23.2 - UNRELEASED
 
   INCOMPATIBLE CHANGES
@@ -513,18 +513,13 @@ public class JsonUtil {
     final byte[] bytes = StringUtils.hexStringToByte((String)m.get("bytes"));
 
     final DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes));
-    final int bytesPerCRC = in.readInt();
-    final long crcPerBlock = in.readLong();
-    final MD5Hash md5 = MD5Hash.read(in);
-    final MD5MD5CRC32FileChecksum checksum = new MD5MD5CRC32FileChecksum(
-        bytesPerCRC, crcPerBlock, md5);
+    final MD5MD5CRC32FileChecksum checksum = new MD5MD5CRC32FileChecksum();
+    checksum.readFields(in);
 
     //check algorithm name
-    final String alg = "MD5-of-" + crcPerBlock + "MD5-of-" + bytesPerCRC + "CRC32";
-    if (!alg.equals(algorithm)) {
-      throw new IOException("Algorithm not matched: algorithm=" + algorithm
-          + ", crcPerBlock=" + crcPerBlock
-          + ", bytesPerCRC=" + bytesPerCRC);
+    if (!checksum.getAlgorithmName().equals(algorithm)) {
+      throw new IOException("Algorithm not matched. Expected " + algorithm
+          + ", Received " + checksum.getAlgorithmName());
     }
     //check length
     if (length != checksum.getLength()) {
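For context, the sketch below is a minimal, self-contained round trip over the same byte layout that WebHDFS hex-encodes into the JSON "bytes" field: it writes a checksum with write() and recovers it through the no-arg constructor plus readFields(), which is the pattern the patched JsonUtil now uses. It is illustrative only and is not part of the commit; the class name ChecksumReadFieldsSketch and the sample values (512 bytes per CRC, 128 CRCs per block, the MD5 of a dummy payload) are made up, and it assumes the Hadoop 0.23/2.x MD5MD5CRC32FileChecksum and MD5Hash APIs are on the classpath.

// Hedged sketch only: not part of the commit. It round-trips a checksum through
// write()/readFields() to show the parsing that JsonUtil now delegates to the class.
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.fs.MD5MD5CRC32FileChecksum;
import org.apache.hadoop.io.MD5Hash;

public class ChecksumReadFieldsSketch {
  public static void main(String[] args) throws IOException {
    // Build a checksum the way the pre-patch code effectively did: from the
    // individual fields (bytesPerCRC, crcPerBlock, md5). Values are made up.
    final MD5Hash md5 = MD5Hash.digest("example payload".getBytes("UTF-8"));
    final MD5MD5CRC32FileChecksum original =
        new MD5MD5CRC32FileChecksum(512, 128, md5);

    // Serialize to the same byte layout that WebHDFS hex-encodes into "bytes".
    final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    original.write(new DataOutputStream(buffer));

    // Post-patch style: the class parses its own fields instead of the caller
    // hand-reading an int, a long and an MD5 digest from the stream.
    final MD5MD5CRC32FileChecksum parsed = new MD5MD5CRC32FileChecksum();
    parsed.readFields(new DataInputStream(
        new ByteArrayInputStream(buffer.toByteArray())));

    // The algorithm check in JsonUtil now compares derived names only.
    if (!parsed.getAlgorithmName().equals(original.getAlgorithmName())) {
      throw new IOException("Algorithm not matched. Expected "
          + original.getAlgorithmName()
          + ", Received " + parsed.getAlgorithmName());
    }
    System.out.println(parsed.getAlgorithmName() + ", length=" + parsed.getLength());
  }
}

Letting readFields() own the parsing means JsonUtil no longer duplicates the field order (bytesPerCRC, crcPerBlock, MD5) or the algorithm-name format, so later changes to the checksum's serialization only have to be made in one place.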