From 1654497f98fb7f2de8214d2fbad305b7a2854816 Mon Sep 17 00:00:00 2001 From: Vinayakumar B Date: Fri, 20 Sep 2019 16:08:30 +0530 Subject: [PATCH] HADOOP-16557. [pb-upgrade] Upgrade protobuf.version to 3.7.1 (#1432) HADOOP-16557. [pb-upgrade] Upgrade protobuf.version to 3.7.1. Contributed by Vinayakumar B. --- BUILDING.txt | 25 ++++++++++++------- .../hadoop-client-runtime/pom.xml | 7 ++++++ .../apache/hadoop/ipc/RemoteException.java | 2 +- .../org/apache/hadoop/ipc/RpcWritable.java | 2 +- .../java/org/apache/hadoop/ipc/Server.java | 6 ++--- .../org/apache/hadoop/util/TestProtoUtil.java | 2 +- .../protocol/datatransfer/PipelineAck.java | 2 +- .../hdfs/protocolPB/PBHelperClient.java | 17 +++++++------ .../pb/FederationProtocolPBTranslator.java | 4 +-- .../hdfs/protocol/BlockListAsLongs.java | 6 ++--- .../namenode/FSImageFormatProtobuf.java | 5 ++-- .../offlineImageViewer/PBImageXmlWriter.java | 2 +- .../hadoop/hdfs/protocolPB/TestPBHelper.java | 2 +- .../server/datanode/TestLargeBlockReport.java | 3 +++ hadoop-project/pom.xml | 5 +++- .../hdfs/server/namenode/ImageWriter.java | 4 +-- .../hadoop-yarn/hadoop-yarn-api/pom.xml | 1 + .../PlacementConstraintToProtoConverter.java | 22 ++++++++-------- .../pb/NodePublishVolumeRequestPBImpl.java | 4 +-- ...lidateVolumeCapabilitiesRequestPBImpl.java | 4 +-- 20 files changed, 74 insertions(+), 51 deletions(-) diff --git a/BUILDING.txt b/BUILDING.txt index 640ee069e56..6f33a60f411 100644 --- a/BUILDING.txt +++ b/BUILDING.txt @@ -6,7 +6,7 @@ Requirements: * Unix System * JDK 1.8 * Maven 3.3 or later -* ProtocolBuffer 2.5.0 +* ProtocolBuffer 3.7.1 * CMake 3.1 or newer (if compiling native code) * Zlib devel (if compiling native code) * Cyrus SASL devel (if compiling native code) @@ -62,8 +62,16 @@ Installing required packages for clean install of Ubuntu 14.04 LTS Desktop: $ sudo apt-get -y install maven * Native libraries $ sudo apt-get -y install build-essential autoconf automake libtool cmake zlib1g-dev pkg-config libssl-dev libsasl2-dev -* ProtocolBuffer 2.5.0 (required) - $ sudo apt-get -y install protobuf-compiler +* ProtocolBuffer 3.7.1 (required) + $ mkdir -p /opt/protobuf-3.7-src \ + && curl -L -s -S \ + https://github.com/protocolbuffers/protobuf/releases/download/v3.7.1/protobuf-java-3.7.1.tar.gz \ + -o /opt/protobuf-3.7.1.tar.gz \ + && tar xzf /opt/protobuf-3.7.1.tar.gz --strip-components 1 -C /opt/protobuf-3.7-src \ + && cd /opt/protobuf-3.7-src \ + && ./configure\ + && make install \ + && rm -rf /opt/protobuf-3.7-src Optional packages: @@ -397,11 +405,10 @@ Installing required dependencies for clean install of macOS 10.14: * Install native libraries, only openssl is required to compile native code, you may optionally install zlib, lz4, etc. 
$ brew install openssl -* Protocol Buffers 2.5.0 (required), since 2.5.0 is no longer in Homebrew, -we need to compile it from source - $ wget https://github.com/protocolbuffers/protobuf/releases/download/v2.5.0/protobuf-2.5.0.tar.gz - $ tar zxvf protobuf-2.5.0.tar.gz - $ cd protobuf-2.5.0 +* Protocol Buffers 3.7.1 (required) + $ wget https://github.com/protocolbuffers/protobuf/releases/download/v3.7.1/protobuf-java-3.7.1.tar.gz + $ mkdir -p protobuf-3.7 && tar zxvf protobuf-java-3.7.1.tar.gz --strip-components 1 -C protobuf-3.7 + $ cd protobuf-3.7 $ ./configure $ make $ make check @@ -432,7 +439,7 @@ Requirements: * Windows System * JDK 1.8 * Maven 3.0 or later -* ProtocolBuffer 2.5.0 +* ProtocolBuffer 3.7.1 * CMake 3.1 or newer * Visual Studio 2010 Professional or Higher * Windows SDK 8.1 (if building CPU rate control for the container executor) diff --git a/hadoop-client-modules/hadoop-client-runtime/pom.xml b/hadoop-client-modules/hadoop-client-runtime/pom.xml index cb1a2f970c7..565e9682d51 100644 --- a/hadoop-client-modules/hadoop-client-runtime/pom.xml +++ b/hadoop-client-modules/hadoop-client-runtime/pom.xml @@ -229,6 +229,13 @@ update* + + com.google.protobuf:protobuf-java + + google/protobuf/*.proto + google/protobuf/**/*.proto + + diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RemoteException.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RemoteException.java index 36e280f3999..f1142d35e72 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RemoteException.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RemoteException.java @@ -66,7 +66,7 @@ public class RemoteException extends IOException { * @return may be null if the code was newer than our protobuf definitions or none was given. 
*/ public RpcErrorCodeProto getErrorCode() { - return RpcErrorCodeProto.valueOf(errorCode); + return RpcErrorCodeProto.forNumber(errorCode); } /** diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcWritable.java index 54fb98e80d8..a97af87bdfb 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcWritable.java @@ -106,7 +106,7 @@ public abstract class RpcWritable implements Writable { @Override void writeTo(ResponseBuffer out) throws IOException { int length = message.getSerializedSize(); - length += CodedOutputStream.computeRawVarint32Size(length); + length += CodedOutputStream.computeUInt32SizeNoTag(length); out.ensureCapacity(length); message.writeDelimitedTo(out); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java index df19de293f7..36785e147d7 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java @@ -3274,10 +3274,10 @@ public abstract class Server { cos.writeRawByte((byte)((length >>> 16) & 0xFF)); cos.writeRawByte((byte)((length >>> 8) & 0xFF)); cos.writeRawByte((byte)((length >>> 0) & 0xFF)); - cos.writeRawVarint32(header.getSerializedSize()); + cos.writeUInt32NoTag(header.getSerializedSize()); header.writeTo(cos); if (payload != null) { - cos.writeRawVarint32(payload.getSerializedSize()); + cos.writeUInt32NoTag(payload.getSerializedSize()); payload.writeTo(cos); } return buf; @@ -3285,7 +3285,7 @@ public abstract class Server { private static int getDelimitedLength(Message message) { int length = message.getSerializedSize(); - return length + CodedOutputStream.computeRawVarint32Size(length); + return length + CodedOutputStream.computeUInt32SizeNoTag(length); } /** diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestProtoUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestProtoUtil.java index ab891b8f200..6b72089faab 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestProtoUtil.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestProtoUtil.java @@ -69,7 +69,7 @@ public class TestProtoUtil { private void doVarIntTest(int value) throws IOException { ByteArrayOutputStream baos = new ByteArrayOutputStream(); CodedOutputStream cout = CodedOutputStream.newInstance(baos); - cout.writeRawVarint32(value); + cout.writeUInt32NoTag(value); cout.flush(); DataInputStream dis = new DataInputStream( diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/PipelineAck.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/PipelineAck.java index be822d664f8..a55125f0722 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/PipelineAck.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/PipelineAck.java @@ -75,7 +75,7 @@ public class PipelineAck { } static Status getStatus(int header) { - return Status.valueOf((int) 
STATUS.BITS.retrieve(header)); + return Status.forNumber((int) STATUS.BITS.retrieve(header)); } static ECN getECN(int header) { diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelperClient.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelperClient.java index 1b2703755c6..691ac54ff29 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelperClient.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelperClient.java @@ -293,7 +293,7 @@ public class PBHelperClient { } public static HdfsProtos.ChecksumTypeProto convert(DataChecksum.Type type) { - return HdfsProtos.ChecksumTypeProto.valueOf(type.id); + return HdfsProtos.ChecksumTypeProto.forNumber(type.id); } public static HdfsProtos.BlockChecksumTypeProto convert( @@ -1115,7 +1115,7 @@ public class PBHelperClient { } public static FsActionProto convert(FsAction v) { - return FsActionProto.valueOf(v != null ? v.ordinal() : 0); + return FsActionProto.forNumber(v != null ? v.ordinal() : 0); } public static XAttrProto convertXAttrProto(XAttr a) { @@ -1157,7 +1157,7 @@ public class PBHelperClient { } static XAttrNamespaceProto convert(XAttr.NameSpace v) { - return XAttrNamespaceProto.valueOf(v.ordinal()); + return XAttrNamespaceProto.forNumber(v.ordinal()); } static XAttr.NameSpace convert(XAttrNamespaceProto v) { @@ -1249,7 +1249,7 @@ public class PBHelperClient { } static AclEntryScopeProto convert(AclEntryScope v) { - return AclEntryScopeProto.valueOf(v.ordinal()); + return AclEntryScopeProto.forNumber(v.ordinal()); } private static AclEntryScope convert(AclEntryScopeProto v) { @@ -1257,7 +1257,7 @@ public class PBHelperClient { } static AclEntryTypeProto convert(AclEntryType e) { - return AclEntryTypeProto.valueOf(e.ordinal()); + return AclEntryTypeProto.forNumber(e.ordinal()); } private static AclEntryType convert(AclEntryTypeProto v) { @@ -3220,7 +3220,7 @@ public class PBHelperClient { public static HdfsProtos.ErasureCodingPolicyState convertECState( ErasureCodingPolicyState state) { - return HdfsProtos.ErasureCodingPolicyState.valueOf(state.getValue()); + return HdfsProtos.ErasureCodingPolicyState.forNumber(state.getValue()); } /** @@ -3356,7 +3356,7 @@ public class PBHelperClient { EnumSet flags) { List ret = new ArrayList<>(); for (AddBlockFlag flag : flags) { - AddBlockFlagProto abfp = AddBlockFlagProto.valueOf(flag.getMode()); + AddBlockFlagProto abfp = AddBlockFlagProto.forNumber(flag.getMode()); if (abfp != null) { ret.add(abfp); } @@ -3409,7 +3409,8 @@ public class PBHelperClient { EnumSet types) { List typeProtos = new ArrayList<>(); for (OpenFilesType type : types) { - OpenFilesTypeProto typeProto = OpenFilesTypeProto.valueOf(type.getMode()); + OpenFilesTypeProto typeProto = OpenFilesTypeProto + .forNumber(type.getMode()); if (typeProto != null) { typeProtos.add(typeProto); } diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/store/protocol/impl/pb/FederationProtocolPBTranslator.java b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/store/protocol/impl/pb/FederationProtocolPBTranslator.java index baad11352f8..31ab9daed30 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/store/protocol/impl/pb/FederationProtocolPBTranslator.java +++ 
b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/store/protocol/impl/pb/FederationProtocolPBTranslator.java @@ -22,7 +22,7 @@ import java.lang.reflect.Method; import org.apache.commons.codec.binary.Base64; -import com.google.protobuf.GeneratedMessage; +import com.google.protobuf.GeneratedMessageV3; import com.google.protobuf.Message; import com.google.protobuf.Message.Builder; import com.google.protobuf.MessageOrBuilder; @@ -31,7 +31,7 @@ import com.google.protobuf.MessageOrBuilder; * Helper class for setting/getting data elements in an object backed by a * protobuf implementation. */ -public class FederationProtocolPBTranslator

{ /** Optional proto byte stream used to create this object. */ diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/BlockListAsLongs.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/BlockListAsLongs.java index 77e40b4389d..1f17ee2201f 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/BlockListAsLongs.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/BlockListAsLongs.java @@ -276,12 +276,12 @@ public abstract class BlockListAsLongs implements Iterable { try { // zig-zag to reduce size of legacy blocks cos.writeSInt64NoTag(replica.getBlockId()); - cos.writeRawVarint64(replica.getBytesOnDisk()); - cos.writeRawVarint64(replica.getGenerationStamp()); + cos.writeUInt64NoTag(replica.getBytesOnDisk()); + cos.writeUInt64NoTag(replica.getGenerationStamp()); ReplicaState state = replica.getState(); // although state is not a 64-bit value, using a long varint to // allow for future use of the upper bits - cos.writeRawVarint64(state.getValue()); + cos.writeUInt64NoTag(state.getValue()); if (state == ReplicaState.FINALIZED) { numFinalized++; } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java index 51379b86573..5950fa61d85 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java @@ -1015,8 +1015,9 @@ public final class FSImageFormatProtobuf { } } - private static int getOndiskTrunkSize(com.google.protobuf.GeneratedMessage s) { - return CodedOutputStream.computeRawVarint32Size(s.getSerializedSize()) + private static int getOndiskTrunkSize( + com.google.protobuf.GeneratedMessageV3 s) { + return CodedOutputStream.computeUInt32SizeNoTag(s.getSerializedSize()) + s.getSerializedSize(); } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java index cec44f51fe0..e84dec598aa 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/PBImageXmlWriter.java @@ -433,7 +433,7 @@ public final class PBImageXmlWriter { int ns = (XATTR_NAMESPACE_MASK & (encodedName >> XATTR_NAMESPACE_OFFSET)) | ((XATTR_NAMESPACE_EXT_MASK & (encodedName >> XATTR_NAMESPACE_EXT_OFFSET)) << 2); o(INODE_SECTION_NS, XAttrProtos.XAttrProto. 
- XAttrNamespaceProto.valueOf(ns).toString()); + XAttrNamespaceProto.forNumber(ns).toString()); o(SECTION_NAME, SerialNumberManager.XATTR.getString( XATTR_NAME_MASK & (encodedName >> XATTR_NAME_OFFSET), stringTable)); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestPBHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestPBHelper.java index 22f84c53959..7b635382574 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestPBHelper.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestPBHelper.java @@ -910,7 +910,7 @@ public class TestPBHelper { b.setFileBufferSize(DFSConfigKeys.IO_FILE_BUFFER_SIZE_DEFAULT); b.setEncryptDataTransfer(DFSConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_DEFAULT); b.setTrashInterval(DFSConfigKeys.FS_TRASH_INTERVAL_DEFAULT); - b.setChecksumType(HdfsProtos.ChecksumTypeProto.valueOf( + b.setChecksumType(HdfsProtos.ChecksumTypeProto.forNumber( DataChecksum.Type.valueOf(DFSConfigKeys.DFS_CHECKSUM_TYPE_DEFAULT).id)); HdfsProtos.FsServerDefaultsProto proto = b.build(); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestLargeBlockReport.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestLargeBlockReport.java index f5a68925c3a..99dc783c86a 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestLargeBlockReport.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestLargeBlockReport.java @@ -75,6 +75,9 @@ public class TestLargeBlockReport { @Test public void testBlockReportExceedsLengthLimit() throws Exception { + // Protobuf's default message size limit increased to 2GB in protobuf 3.x, so protobuf + // itself no longer rejects the report; lower Hadoop's own IPC limit so the test stays meaningful. + conf.setInt(IPC_MAXIMUM_DATA_LENGTH, IPC_MAXIMUM_DATA_LENGTH_DEFAULT / 2); initCluster(); // Create a large enough report that we expect it will go beyond the RPC // server's length validation, and also protobuf length validation. 
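Most of the Java hunks above replace APIs that protobuf 3.x removed: generated enums no longer expose valueOf(int) (use forNumber(int), which returns null for unknown numbers), and CodedOutputStream's raw-varint helpers were renamed to the *NoTag variants. The sketch below is not part of the patch; it is a minimal illustration of the replacement calls, using DescriptorProtos only because that class ships with protobuf-java.

// Illustrative only; assumes protobuf-java 3.7.x on the classpath.
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import com.google.protobuf.CodedOutputStream;
import com.google.protobuf.DescriptorProtos.FieldDescriptorProto;

public class ProtobufThreeApiSketch {
  public static void main(String[] args) throws IOException {
    // forNumber() replaces the removed EnumType.valueOf(int); it returns null
    // for unknown numbers, which is why the patched call sites keep null checks.
    FieldDescriptorProto.Type known = FieldDescriptorProto.Type.forNumber(9);
    FieldDescriptorProto.Type unknown = FieldDescriptorProto.Type.forNumber(9999);
    System.out.println(known);    // TYPE_STRING
    System.out.println(unknown);  // null

    // writeUInt32NoTag()/computeUInt32SizeNoTag() replace writeRawVarint32()/
    // computeRawVarint32Size(): same varint encoding, new names.
    FieldDescriptorProto msg = FieldDescriptorProto.newBuilder().setName("f").build();
    int payload = msg.getSerializedSize();
    int framed = payload + CodedOutputStream.computeUInt32SizeNoTag(payload);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    CodedOutputStream cos = CodedOutputStream.newInstance(baos);
    cos.writeUInt32NoTag(payload);  // varint length prefix
    msg.writeTo(cos);               // delimited payload
    cos.flush();
    System.out.println(framed == baos.size());  // true
  }
}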
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml index 0aa84dcdb17..9f85148fbf9 100644 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml @@ -84,7 +84,7 @@ - 2.5.0 + 3.7.1 ${env.HADOOP_PROTOC_PATH} 3.4.13 @@ -1918,6 +1918,9 @@ false + + /opt/protobuf-3.7/bin/protoc + diff --git a/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/ImageWriter.java b/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/ImageWriter.java index d4a632cf06d..a499c8ac000 100644 --- a/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/ImageWriter.java +++ b/hadoop-tools/hadoop-fs2img/src/main/java/org/apache/hadoop/hdfs/server/namenode/ImageWriter.java @@ -266,8 +266,8 @@ public class ImageWriter implements Closeable { e.writeDelimitedTo(dirs); } - private static int getOndiskSize(com.google.protobuf.GeneratedMessage s) { - return CodedOutputStream.computeRawVarint32Size(s.getSerializedSize()) + private static int getOndiskSize(com.google.protobuf.GeneratedMessageV3 s) { + return CodedOutputStream.computeUInt32SizeNoTag(s.getSerializedSize()) + s.getSerializedSize(); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/pom.xml index 21ace7d4638..91b5d8d22c3 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/pom.xml +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/pom.xml @@ -55,6 +55,7 @@ org.apache.hadoop hadoop-annotations + com.google.protobuf protobuf-java diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/pb/PlacementConstraintToProtoConverter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/pb/PlacementConstraintToProtoConverter.java index 30f774136dc..caf254a2b3e 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/pb/PlacementConstraintToProtoConverter.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/pb/PlacementConstraintToProtoConverter.java @@ -41,7 +41,7 @@ import org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto; import org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto; import org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto; -import com.google.protobuf.GeneratedMessage; +import com.google.protobuf.GeneratedMessageV3; /** * {@code PlacementConstraintToProtoConverter} generates a @@ -50,7 +50,7 @@ import com.google.protobuf.GeneratedMessage; */ @Private public class PlacementConstraintToProtoConverter - implements PlacementConstraint.Visitor { + implements PlacementConstraint.Visitor { private PlacementConstraint placementConstraint; @@ -65,7 +65,7 @@ public class PlacementConstraintToProtoConverter } @Override - public GeneratedMessage visit(SingleConstraint constraint) { + public GeneratedMessageV3 visit(SingleConstraint constraint) { SimplePlacementConstraintProto.Builder sb = SimplePlacementConstraintProto.newBuilder(); @@ -94,7 +94,7 @@ public class PlacementConstraintToProtoConverter } @Override - public GeneratedMessage visit(TargetExpression target) { + public GeneratedMessageV3 visit(TargetExpression target) { PlacementConstraintTargetProto.Builder tb = PlacementConstraintTargetProto.newBuilder(); @@ -109,16 +109,16 @@ public class PlacementConstraintToProtoConverter } @Override - public GeneratedMessage visit(TargetConstraint constraint) { + 
public GeneratedMessageV3 visit(TargetConstraint constraint) { throw new YarnRuntimeException("Unexpected TargetConstraint found."); } @Override - public GeneratedMessage visit(CardinalityConstraint constraint) { + public GeneratedMessageV3 visit(CardinalityConstraint constraint) { throw new YarnRuntimeException("Unexpected CardinalityConstraint found."); } - private GeneratedMessage visitAndOr( + private GeneratedMessageV3 visitAndOr( CompositeConstraint composite, CompositeType type) { CompositePlacementConstraintProto.Builder cb = CompositePlacementConstraintProto.newBuilder(); @@ -137,17 +137,17 @@ public class PlacementConstraintToProtoConverter } @Override - public GeneratedMessage visit(And constraint) { + public GeneratedMessageV3 visit(And constraint) { return visitAndOr(constraint, CompositeType.AND); } @Override - public GeneratedMessage visit(Or constraint) { + public GeneratedMessageV3 visit(Or constraint) { return visitAndOr(constraint, CompositeType.OR); } @Override - public GeneratedMessage visit(DelayedOr constraint) { + public GeneratedMessageV3 visit(DelayedOr constraint) { CompositePlacementConstraintProto.Builder cb = CompositePlacementConstraintProto.newBuilder(); @@ -166,7 +166,7 @@ public class PlacementConstraintToProtoConverter } @Override - public GeneratedMessage visit(TimedPlacementConstraint constraint) { + public GeneratedMessageV3 visit(TimedPlacementConstraint constraint) { TimedPlacementConstraintProto.Builder tb = TimedPlacementConstraintProto.newBuilder(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/NodePublishVolumeRequestPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/NodePublishVolumeRequestPBImpl.java index c3590230c53..35638563009 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/NodePublishVolumeRequestPBImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/NodePublishVolumeRequestPBImpl.java @@ -154,10 +154,10 @@ public class NodePublishVolumeRequestPBImpl extends CsiAdaptorProtos.VolumeCapability vc = CsiAdaptorProtos.VolumeCapability.newBuilder() .setAccessMode(CsiAdaptorProtos.VolumeCapability - .AccessMode.valueOf( + .AccessMode.forNumber( capability.getAccessMode().ordinal())) .setVolumeType(CsiAdaptorProtos.VolumeCapability - .VolumeType.valueOf(capability.getVolumeType().ordinal())) + .VolumeType.forNumber(capability.getVolumeType().ordinal())) .addAllMountFlags(capability.getMountFlags()) .build(); builder.setVolumeCapability(vc); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/ValidateVolumeCapabilitiesRequestPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/ValidateVolumeCapabilitiesRequestPBImpl.java index 14bd89dc46c..bf3f4f55b24 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/ValidateVolumeCapabilitiesRequestPBImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/ValidateVolumeCapabilitiesRequestPBImpl.java @@ -68,9 +68,9 @@ public class ValidateVolumeCapabilitiesRequestPBImpl extends 
CsiAdaptorProtos.VolumeCapability vc = CsiAdaptorProtos.VolumeCapability.newBuilder() .setAccessMode(CsiAdaptorProtos.VolumeCapability.AccessMode - .valueOf(volumeCapability.getAccessMode().ordinal())) + .forNumber(volumeCapability.getAccessMode().ordinal())) .setVolumeType(CsiAdaptorProtos.VolumeCapability.VolumeType - .valueOf(volumeCapability.getVolumeType().ordinal())) + .forNumber(volumeCapability.getVolumeType().ordinal())) .addAllMountFlags(volumeCapability.getMountFlags()) .build(); builder.addVolumeCapabilities(vc);
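
The GeneratedMessage to GeneratedMessageV3 signature changes above follow from the same upgrade: classes emitted by protoc 3.x extend GeneratedMessageV3, which is not a subtype of the old GeneratedMessage, so helpers typed against the old base class stop accepting them. Below is a minimal sketch (not part of the patch) of a helper bounded on the new base class; FileDescriptorProto is used only because it ships with protobuf-java.

// Illustrative only; assumes protobuf-java 3.7.x on the classpath.
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import com.google.protobuf.DescriptorProtos.FileDescriptorProto;
import com.google.protobuf.GeneratedMessageV3;

public class GeneratedMessageV3Sketch {
  // A helper bounded on the 3.x base class; messages generated by protoc 3.x
  // are not instances of the old GeneratedMessage, so the old bound would not
  // compile against them here.
  static byte[] toDelimitedBytes(GeneratedMessageV3 m) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    m.writeDelimitedTo(out);  // varint length prefix followed by the payload
    return out.toByteArray();
  }

  public static void main(String[] args) throws IOException {
    FileDescriptorProto msg =
        FileDescriptorProto.newBuilder().setName("example.proto").build();
    byte[] framed = toDelimitedBytes(msg);
    FileDescriptorProto back =
        FileDescriptorProto.parseDelimitedFrom(new ByteArrayInputStream(framed));
    System.out.println(back.getName());  // example.proto
  }
}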