From caa658a015e22938c345451bd4de46b0e0f08507 Mon Sep 17 00:00:00 2001
From: Colin Patrick Mccabe
Date: Thu, 23 Oct 2014 19:14:00 -0700
Subject: [PATCH] HDFS-7223. Tracing span description of IPC client is too
 long (iwasakims via cmccabe)

(cherry picked from commit 5b56ac4c72d69d1b3d6feec9d96c9d62eb02d28c)
---
 .../apache/hadoop/ipc/ProtobufRpcEngine.java  |  4 +---
 .../org/apache/hadoop/ipc/RpcClientUtil.java  | 21 +++++++++++++++++++
 .../apache/hadoop/ipc/WritableRpcEngine.java  |  4 +---
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt   |  3 +++
 .../apache/hadoop/tracing/TestTracing.java    | 10 +++++-----
 5 files changed, 31 insertions(+), 11 deletions(-)

diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
index f8c35d91bef..d8e22c7d874 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
@@ -212,9 +212,7 @@ public class ProtobufRpcEngine implements RpcEngine {
       // guard it in the if statement to make sure there isn't
       // any extra string manipulation.
       if (Trace.isTracing()) {
-        traceScope = Trace.startSpan(
-            method.getDeclaringClass().getCanonicalName() +
-            "." + method.getName());
+        traceScope = Trace.startSpan(RpcClientUtil.methodToTraceString(method));
       }
 
       RequestHeaderProto rpcRequestHeader = constructRpcRequestHeader(method);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java
index 2623f9ede5c..d9bd71b1f04 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java
@@ -189,4 +189,25 @@ public class RpcClientUtil {
         .getProtocolMetaInfoProxy(inv.getConnectionId(), conf,
             NetUtils.getDefaultSocketFactory(conf)).getProxy();
   }
+
+  /**
+   * Convert an RPC method to a string.
+   * The format we want is 'MethodOuterClassShortName#methodName'.
+   *
+   * For example, if the method is:
+   *   org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.
+   *   ClientNamenodeProtocol.BlockingInterface.getServerDefaults
+   *
+   * the format we want is:
+   *   ClientNamenodeProtocol#getServerDefaults
+   */
+  public static String methodToTraceString(Method method) {
+    Class<?> clazz = method.getDeclaringClass();
+    while (true) {
+      Class<?> next = clazz.getEnclosingClass();
+      if (next == null || next.getEnclosingClass() == null) break;
+      clazz = next;
+    }
+    return clazz.getSimpleName() + "#" + method.getName();
+  }
 }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
index 037f0061668..869a52a61b4 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
@@ -235,9 +235,7 @@ public class WritableRpcEngine implements RpcEngine {
       }
       TraceScope traceScope = null;
       if (Trace.isTracing()) {
-        traceScope = Trace.startSpan(
-            method.getDeclaringClass().getCanonicalName() +
-            "." + method.getName());
+        traceScope = Trace.startSpan(RpcClientUtil.methodToTraceString(method));
       }
       ObjectWritable value;
       try {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 0c6c41f86d9..88e9111c060 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -44,6 +44,9 @@ Release 2.7.0 - UNRELEASED
     HDFS-7257. Add the time of last HA state transition to NN's /jmx page.
     (Charles Lamb via wheat9)
 
+    HDFS-7223. Tracing span description of IPC client is too long (iwasakims
+    via cmccabe)
+
   OPTIMIZATIONS
 
   BUG FIXES
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracing.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracing.java
index 5c245009bcd..0f96c6f5997 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracing.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracing.java
@@ -83,15 +83,15 @@ public class TestTracing {
     String[] expectedSpanNames = {
       "testWriteTraceHooks",
       "org.apache.hadoop.hdfs.protocol.ClientProtocol.create",
-      "org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ClientNamenodeProtocol.BlockingInterface.create",
+      "ClientNamenodeProtocol#create",
       "org.apache.hadoop.hdfs.protocol.ClientProtocol.fsync",
-      "org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ClientNamenodeProtocol.BlockingInterface.fsync",
+      "ClientNamenodeProtocol#fsync",
       "org.apache.hadoop.hdfs.protocol.ClientProtocol.complete",
-      "org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ClientNamenodeProtocol.BlockingInterface.complete",
+      "ClientNamenodeProtocol#complete",
       "DFSOutputStream",
       "OpWriteBlockProto",
       "org.apache.hadoop.hdfs.protocol.ClientProtocol.addBlock",
-      "org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ClientNamenodeProtocol.BlockingInterface.addBlock"
+      "ClientNamenodeProtocol#addBlock"
     };
     assertSpanNamesFound(expectedSpanNames);
 
@@ -162,7 +162,7 @@ public class TestTracing {
     String[] expectedSpanNames = {
       "testReadTraceHooks",
       "org.apache.hadoop.hdfs.protocol.ClientProtocol.getBlockLocations",
-      "org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ClientNamenodeProtocol.BlockingInterface.getBlockLocations",
+      "ClientNamenodeProtocol#getBlockLocations",
      "OpReadBlockProto"
     };
     assertSpanNamesFound(expectedSpanNames);
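
Illustrative sketch (not part of the patch): the stand-alone demo below shows how the methodToTraceString() helper added above collapses a deeply nested, protobuf-generated interface name into the short "Protocol#method" form asserted by the updated TestTracing expectations. FakeProtos and TraceStringDemo are hypothetical names made up for this example, and the helper body is copied from the patch so the file compiles on its own.

import java.lang.reflect.Method;

// Hypothetical stand-in for a protobuf-generated outer class such as
// ClientNamenodeProtocolProtos; kept top-level so the nesting depth
// (Outer.Protocol.BlockingInterface.method) matches the real generated code.
class FakeProtos {
  static class ClientNamenodeProtocol {
    interface BlockingInterface {
      void getServerDefaults();
    }
  }
}

public class TraceStringDemo {
  // Same logic as RpcClientUtil.methodToTraceString in the patch above:
  // walk up the enclosing classes, stopping one level below the top.
  static String methodToTraceString(Method method) {
    Class<?> clazz = method.getDeclaringClass();
    while (true) {
      Class<?> next = clazz.getEnclosingClass();
      if (next == null || next.getEnclosingClass() == null) break;
      clazz = next;
    }
    return clazz.getSimpleName() + "#" + method.getName();
  }

  public static void main(String[] args) throws Exception {
    Method m = FakeProtos.ClientNamenodeProtocol.BlockingInterface.class
        .getMethod("getServerDefaults");
    // The old span name was the canonical declaring-class name plus the method
    // name; the helper prints "ClientNamenodeProtocol#getServerDefaults".
    System.out.println(methodToTraceString(m));
  }
}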