From 8b0db38ec81b2c77cc7f52d94f729a39c07dae7f Mon Sep 17 00:00:00 2001
From: Suresh Srinivas
Date: Mon, 5 Nov 2012 23:56:01 +0000
Subject: [PATCH] HDFS-4046. Merging changes 1406011 and 1406012 from trunk.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1406013 13f79535-47bb-0310-9956-ffa450edef68
---
 .../hadoop/hdfs/protocol/HdfsProtoUtil.java    |  4 +-
 .../datatransfer/DataTransferProtoUtil.java    |  4 +-
 .../hadoop/hdfs/protocolPB/PBHelper.java       |  7 ++--
 .../src/main/proto/datatransfer.proto          |  2 +-
 .../hadoop-hdfs/src/main/proto/hdfs.proto      | 10 +++--
 .../hdfs/protocol/TestHdfsProtoUtil.java       | 42 +++++++++++++++++++
 6 files changed, 56 insertions(+), 13 deletions(-)
 create mode 100644 hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocol/TestHdfsProtoUtil.java

diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsProtoUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsProtoUtil.java
index 5bd8a0806f3..fe7446f6740 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsProtoUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsProtoUtil.java
@@ -157,11 +157,11 @@ public abstract class HdfsProtoUtil {
   }
 
   public static DataChecksum.Type fromProto(HdfsProtos.ChecksumTypeProto type) {
-    return DataChecksum.Type.valueOf(type.name());
+    return DataChecksum.Type.valueOf(type.getNumber());
   }
 
   public static HdfsProtos.ChecksumTypeProto toProto(DataChecksum.Type type) {
-    return HdfsProtos.ChecksumTypeProto.valueOf(type.name());
+    return HdfsProtos.ChecksumTypeProto.valueOf(type.id);
   }
 
   public static InputStream vintPrefixed(final InputStream input)
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/DataTransferProtoUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/DataTransferProtoUtil.java
index dd9beb535bc..d2b04ed2bdf 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/DataTransferProtoUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/DataTransferProtoUtil.java
@@ -52,7 +52,7 @@ public abstract class DataTransferProtoUtil {
   }
 
   public static ChecksumProto toProto(DataChecksum checksum) {
-    ChecksumTypeProto type = ChecksumTypeProto.valueOf(checksum.getChecksumType().name());
+    ChecksumTypeProto type = HdfsProtoUtil.toProto(checksum.getChecksumType());
     if (type == null) {
       throw new IllegalArgumentException(
           "Can't convert checksum to protobuf: " + checksum);
@@ -68,7 +68,7 @@ public abstract class DataTransferProtoUtil {
     if (proto == null) return null;
 
     int bytesPerChecksum = proto.getBytesPerChecksum();
-    DataChecksum.Type type = DataChecksum.Type.valueOf(proto.getType().name());
+    DataChecksum.Type type = HdfsProtoUtil.fromProto(proto.getType());
 
     return DataChecksum.newDataChecksum(type, bytesPerChecksum);
   }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
index 09032baf398..d197d07ace2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
@@ -39,6 +39,7 @@ import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.hdfs.protocol.HdfsLocatedFileStatus;
+import org.apache.hadoop.hdfs.protocol.HdfsProtoUtil;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos;
@@ -67,7 +68,6 @@ import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto;
-import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ChecksumTypeProto;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ContentSummaryProto;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CorruptFileBlocksProto;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto;
@@ -129,7 +129,6 @@ import org.apache.hadoop.hdfs.server.protocol.RemoteEditLog;
 import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest;
 import org.apache.hadoop.io.EnumSetWritable;
 import org.apache.hadoop.io.Text;
-import org.apache.hadoop.util.DataChecksum;
 import org.apache.hadoop.security.token.Token;
 
 import com.google.protobuf.ByteString;
@@ -958,7 +957,7 @@ public class PBHelper {
         fs.getFileBufferSize(),
         fs.getEncryptDataTransfer(),
         fs.getTrashInterval(),
-        DataChecksum.Type.valueOf(fs.getChecksumType().name()));
+        HdfsProtoUtil.fromProto(fs.getChecksumType()));
   }
 
   public static FsServerDefaultsProto convert(FsServerDefaults fs) {
@@ -971,7 +970,7 @@ public class PBHelper {
       .setFileBufferSize(fs.getFileBufferSize())
       .setEncryptDataTransfer(fs.getEncryptDataTransfer())
       .setTrashInterval(fs.getTrashInterval())
-      .setChecksumType(ChecksumTypeProto.valueOf(fs.getChecksumType().name()))
+      .setChecksumType(HdfsProtoUtil.toProto(fs.getChecksumType()))
       .build();
   }
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/datatransfer.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/datatransfer.proto
index ae42c8358b3..d202f79a97a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/datatransfer.proto
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/datatransfer.proto
@@ -181,5 +181,5 @@ message OpBlockChecksumResponseProto {
   required uint32 bytesPerCrc = 1;
   required uint64 crcPerBlock = 2;
   required bytes md5 = 3;
-  optional ChecksumTypeProto crcType = 4 [default = CRC32];
+  optional ChecksumTypeProto crcType = 4 [default = CHECKSUM_CRC32];
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/hdfs.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/hdfs.proto
index 8e98f2c9aeb..f4622510cdf 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/hdfs.proto
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/hdfs.proto
@@ -181,11 +181,13 @@ message HdfsFileStatusProto {
 
 /**
  * Checksum algorithms/types used in HDFS
+ * Make sure this enum's integer values match enum values' id properties defined
+ * in org.apache.hadoop.util.DataChecksum.Type
  */
 enum ChecksumTypeProto {
-  NULL = 0;
-  CRC32 = 1;
-  CRC32C = 2;
+  CHECKSUM_NULL = 0;
+  CHECKSUM_CRC32 = 1;
+  CHECKSUM_CRC32C = 2;
 }
 
 /**
@@ -199,7 +201,7 @@ message FsServerDefaultsProto {
   required uint32 fileBufferSize = 5;
   optional bool encryptDataTransfer = 6 [default = false];
   optional uint64 trashInterval = 7 [default = 0];
-  optional ChecksumTypeProto checksumType = 8 [default = CRC32];
+  optional ChecksumTypeProto checksumType = 8 [default = CHECKSUM_CRC32];
 }
 
 /**
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocol/TestHdfsProtoUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocol/TestHdfsProtoUtil.java
new file mode 100644
index 00000000000..0a04e3c521f
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocol/TestHdfsProtoUtil.java
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.protocol;
+
+import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos;
+import org.apache.hadoop.util.DataChecksum;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+
+public class TestHdfsProtoUtil {
+  @Test
+  public void testChecksumTypeProto() {
+    assertEquals(DataChecksum.Type.NULL,
+        HdfsProtoUtil.fromProto(HdfsProtos.ChecksumTypeProto.CHECKSUM_NULL));
+    assertEquals(DataChecksum.Type.CRC32,
+        HdfsProtoUtil.fromProto(HdfsProtos.ChecksumTypeProto.CHECKSUM_CRC32));
+    assertEquals(DataChecksum.Type.CRC32C,
+        HdfsProtoUtil.fromProto(HdfsProtos.ChecksumTypeProto.CHECKSUM_CRC32C));
+    assertEquals(HdfsProtoUtil.toProto(DataChecksum.Type.NULL),
+        HdfsProtos.ChecksumTypeProto.CHECKSUM_NULL);
+    assertEquals(HdfsProtoUtil.toProto(DataChecksum.Type.CRC32),
+        HdfsProtos.ChecksumTypeProto.CHECKSUM_CRC32);
+    assertEquals(HdfsProtoUtil.toProto(DataChecksum.Type.CRC32C),
+        HdfsProtos.ChecksumTypeProto.CHECKSUM_CRC32C);
+  }
+}
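
Note on the conversion (illustrative only, not part of the patch): the helpers in HdfsProtoUtil now map between HdfsProtos.ChecksumTypeProto and DataChecksum.Type by numeric value (getNumber() on the proto enum, the public id field on the Java enum) rather than by enum name. That is what lets the proto values take the CHECKSUM_ prefix without touching the Java enum, and it is why the new hdfs.proto comment requires the proto integer values to match DataChecksum.Type ids; TestHdfsProtoUtil checks the round trip in both directions. A minimal sketch of the mapping follows. The class name ChecksumTypeMappingSketch is hypothetical, and the sketch assumes the protobuf-generated HdfsProtos and org.apache.hadoop.util.DataChecksum classes from the tree this patch applies to are on the classpath.

import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ChecksumTypeProto;
import org.apache.hadoop.util.DataChecksum;

public class ChecksumTypeMappingSketch {
  public static void main(String[] args) {
    // Name-based lookup (the old code) would fail after the rename, because the
    // Java enum value is CRC32C while the proto enum value is CHECKSUM_CRC32C.
    // Number-based lookup still works because the integer values agree, which is
    // the invariant the new hdfs.proto comment asks maintainers to preserve.
    DataChecksum.Type javaType =
        DataChecksum.Type.valueOf(ChecksumTypeProto.CHECKSUM_CRC32C.getNumber());
    ChecksumTypeProto protoType = ChecksumTypeProto.valueOf(javaType.id);
    System.out.println(javaType + " <-> " + protoType); // expected: CRC32C <-> CHECKSUM_CRC32C
  }
}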