diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceAdmin.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceAdmin.java
index 5fdfbfadd2d..4cf1ead23fb 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceAdmin.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceAdmin.java
@@ -29,6 +29,7 @@ import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.ipc.ProtobufRpcEngine;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.net.NetUtils;
@@ -36,6 +37,8 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.tools.TableListing;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Tool;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * A command-line tool for viewing and modifying tracing settings.
@@ -44,6 +47,7 @@ import org.apache.hadoop.util.Tool;
 public class TraceAdmin extends Configured implements Tool {
   private TraceAdminProtocolPB proxy;
   private TraceAdminProtocolTranslatorPB remote;
+  private static final Logger LOG = LoggerFactory.getLogger(TraceAdmin.class);
 
   private void usage() {
     PrintStream err = System.err;
@@ -61,7 +65,9 @@ public class TraceAdmin extends Configured implements Tool {
         "  -list: List the current span receivers.\n" +
         "  -remove [id]\n" +
         "    Remove the span receiver with the specified id. Use -list to\n" +
-        "    find the id of each receiver.\n"
+        "    find the id of each receiver.\n" +
+        "  -principal: If the daemon is Kerberized, specify the service\n" +
+        "    principal name."
     );
   }
 
@@ -166,6 +172,14 @@ public class TraceAdmin extends Configured implements Tool {
       System.err.println("You must specify an operation.");
       return 1;
     }
+    String servicePrincipal = StringUtils.popOptionWithArgument("-principal",
+        args);
+    if (servicePrincipal != null) {
+      LOG.debug("Set service principal: {}", servicePrincipal);
+      getConf().set(
+          CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_USER_NAME_KEY,
+          servicePrincipal);
+    }
     RPC.setProtocolEngine(getConf(), TraceAdminProtocolPB.class,
         ProtobufRpcEngine.class);
     InetSocketAddress address = NetUtils.createSocketAddr(hostPort);
diff --git a/hadoop-common-project/hadoop-common/src/site/markdown/Tracing.md b/hadoop-common-project/hadoop-common/src/site/markdown/Tracing.md
index 1876225e218..4e202750397 100644
--- a/hadoop-common-project/hadoop-common/src/site/markdown/Tracing.md
+++ b/hadoop-common-project/hadoop-common/src/site/markdown/Tracing.md
@@ -84,6 +84,15 @@ You can specify the configuration associated with span receiver by `-Ckey=value`
       ID  CLASS
       2   org.apache.htrace.core.LocalFileSpanReceiver
 
+If the cluster is Kerberized, the service principal name must be specified using the `-principal` option.
+For example, to show the list of span receivers of a namenode:
+
+    $ hadoop trace -list -host NN1:8020 -principal namenode/NN1@EXAMPLE.COM
+
+Or, for a datanode:
+
+    $ hadoop trace -list -host DN2:9867 -principal datanode/DN2@EXAMPLE.COM
+
 ### Starting tracing spans by HTrace API
 
 In order to trace, you will need to wrap the traced logic with **tracing span** as shown below.
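Reviewer note: the new flag introduces no new security mechanism; it only maps `-principal` onto the existing `hadoop.security.service.user.name` key before the RPC proxy is created. Below is a minimal sketch of the programmatic equivalent; the class name, host `NN1:8020`, and principal are illustrative placeholders, not part of the patch.

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.tracing.TraceAdmin;
import org.apache.hadoop.util.ToolRunner;

/**
 * Illustrative sketch only: the programmatic equivalent of
 *   hadoop trace -list -host NN1:8020 -principal namenode/NN1@EXAMPLE.COM
 * The host and principal are placeholders.
 */
public class TraceAdminPrincipalSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // This is all the new -principal option does: set the service user
    // name key, so the secure RPC client knows which server principal
    // to expect when talking to the daemon.
    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_USER_NAME_KEY,
        "namenode/NN1@EXAMPLE.COM");
    System.exit(ToolRunner.run(conf, new TraceAdmin(),
        new String[] {"-list", "-host", "NN1:8020"}));
  }
}
```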
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTraceAdmin.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTraceAdmin.java
index 198dafb3412..d6d0550fb5f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTraceAdmin.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTraceAdmin.java
@@ -19,8 +19,12 @@ package org.apache.hadoop.tracing;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferTestCase;
+import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.net.unix.TemporarySocketDirectory;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.htrace.core.Tracer;
 import org.junit.Assert;
 import org.junit.Test;
@@ -28,9 +32,18 @@ import org.junit.Test;
 import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.PrintStream;
+import java.security.PrivilegedExceptionAction;
+import java.util.ArrayList;
 
-public class TestTraceAdmin {
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertEquals;
+
+/**
+ * Test cases for TraceAdmin.
+ */
+public class TestTraceAdmin extends SaslDataTransferTestCase {
   private static final String NEWLINE = System.getProperty("line.separator");
+  private static final int ONE_DATANODE = 1;
 
   private String runTraceCommand(TraceAdmin trace, String... cmd)
       throws Exception {
@@ -58,6 +71,12 @@ public class TestTraceAdmin {
     return "127.0.0.1:" + cluster.getNameNodePort();
   }
 
+  private String getHostPortForDN(MiniDFSCluster cluster, int index) {
+    ArrayList<DataNode> dns = cluster.getDataNodes();
+    assertTrue(index >= 0 && index < dns.size());
+    return "127.0.0.1:" + dns.get(index).getIpcPort();
+  }
+
   @Test
   public void testCreateAndDestroySpanReceiver() throws Exception {
     Configuration conf = new Configuration();
@@ -102,4 +121,52 @@ public class TestTraceAdmin {
       tempDir.close();
     }
   }
+
+  /**
+   * Test running hadoop trace commands with the -principal option against
+   * a Kerberized NN and DN.
+   *
+   * @throws Exception
+   */
+  @Test
+  public void testKerberizedTraceAdmin() throws Exception {
+    MiniDFSCluster cluster = null;
+    final HdfsConfiguration conf = createSecureConfig(
+        "authentication,privacy");
+    try {
+      cluster = new MiniDFSCluster.Builder(conf)
+          .numDataNodes(ONE_DATANODE)
+          .build();
+      cluster.waitActive();
+      final String nnHost = getHostPortForNN(cluster);
+      final String dnHost = getHostPortForDN(cluster, 0);
+      // Log in from the keytab and run the trace commands as the HDFS
+      // principal.
+      UserGroupInformation
+          .loginUserFromKeytabAndReturnUGI(getHdfsPrincipal(), getHdfsKeytab())
+          .doAs(new PrivilegedExceptionAction<Void>() {
+            @Override
+            public Void run() throws Exception {
+              // send trace command to NN
+              TraceAdmin trace = new TraceAdmin();
+              trace.setConf(conf);
+              final String[] nnTraceCmd = new String[] {
+                  "-list", "-host", nnHost, "-principal",
+                  conf.get(DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY)};
+              int ret = trace.run(nnTraceCmd);
+              assertEquals(0, ret);
+              // send trace command to DN
+              final String[] dnTraceCmd = new String[] {
+                  "-list", "-host", dnHost, "-principal",
+                  conf.get(DFSConfigKeys.DFS_DATANODE_KERBEROS_PRINCIPAL_KEY)};
+              ret = trace.run(dnTraceCmd);
+              assertEquals(0, ret);
+              return null;
+            }
+          });
+    } finally {
+      if (cluster != null) {
+        cluster.shutdown();
+      }
+    }
+  }
 }
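For context, the new test leans on the standard UGI keytab pattern (log in from a keytab, then issue RPCs inside `doAs`). A self-contained sketch of that pattern follows; the class name, principal, and keytab path are placeholders, not the values supplied by `SaslDataTransferTestCase`.

```java
import java.security.PrivilegedExceptionAction;

import org.apache.hadoop.security.UserGroupInformation;

/**
 * Sketch of the keytab login pattern used by testKerberizedTraceAdmin().
 * The principal and keytab path below are placeholders.
 */
public class KeytabDoAsSketch {
  public static void main(String[] args) throws Exception {
    UserGroupInformation ugi =
        UserGroupInformation.loginUserFromKeytabAndReturnUGI(
            "hdfs/localhost@EXAMPLE.COM",
            "/etc/security/keytabs/hdfs.keytab");
    ugi.doAs(new PrivilegedExceptionAction<Void>() {
      @Override
      public Void run() throws Exception {
        // Any Hadoop RPC issued here authenticates as the keytab principal,
        // which is why the test wraps its TraceAdmin calls in doAs().
        System.out.println("Running as: "
            + UserGroupInformation.getCurrentUser());
        return null;
      }
    });
  }
}
```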