HADOOP-11894. Bump the version of Apache HTrace to 3.2.0-incubating (Masatake Iwasaki via Colin P. McCabe)

(cherry picked from commit a1140959da)
Author: Colin Patrick Mccabe
Date:   2015-05-28 12:00:55 -07:00
Commit: 4b108a441d (parent 2a56adc550)
5 changed files with 18 additions and 60 deletions

CHANGES.txt

@@ -135,6 +135,9 @@ Release 2.8.0 - UNRELEASED
HADOOP-12030. test-patch should only report on newly introduced
findbugs warnings. (Sean Busbey via aw)
HADOOP-11894. Bump the version of Apache HTrace to 3.2.0-incubating
(Masatake Iwasaki via Colin P. McCabe)
OPTIMIZATIONS
HADOOP-11785. Reduce the number of listStatus operation in distcp

SpanReceiverHost.java

@@ -17,33 +17,23 @@
*/
package org.apache.hadoop.tracing;
import java.io.BufferedReader;
import java.io.DataInputStream;
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.UUID;
import org.apache.commons.io.Charsets;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.tracing.SpanReceiverInfo.ConfigurationPair;
import org.apache.hadoop.tracing.TraceUtils;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.ShutdownHookManager;
import org.apache.htrace.SpanReceiver;
import org.apache.htrace.SpanReceiverBuilder;
import org.apache.htrace.Trace;
import org.apache.htrace.impl.LocalFileSpanReceiver;
/**
* This class provides functions for reading the names of SpanReceivers from
@@ -88,33 +78,6 @@ public class SpanReceiverHost implements TraceAdminProtocol {
private static List<ConfigurationPair> EMPTY = Collections.emptyList();
private static String getUniqueLocalTraceFileName() {
String tmp = System.getProperty("java.io.tmpdir", "/tmp");
String nonce = null;
BufferedReader reader = null;
try {
// On Linux we can get a unique local file name by reading the process id
// out of /proc/self/stat. (There isn't any portable way to get the
// process ID from Java.)
reader = new BufferedReader(
new InputStreamReader(new FileInputStream("/proc/self/stat"),
Charsets.UTF_8));
String line = reader.readLine();
if (line == null) {
throw new EOFException();
}
nonce = line.split(" ")[0];
} catch (IOException e) {
} finally {
IOUtils.cleanup(LOG, reader);
}
if (nonce == null) {
// If we can't use the process ID, use a random nonce.
nonce = UUID.randomUUID().toString();
}
return new File(tmp, nonce).getAbsolutePath();
}
private SpanReceiverHost(String confPrefix) {
this.confPrefix = confPrefix;
}
@@ -143,7 +106,7 @@ public class SpanReceiverHost implements TraceAdminProtocol {
// testing.
String pathKey = confPrefix + LOCAL_FILE_SPAN_RECEIVER_PATH_SUFFIX;
if (config.get(pathKey) == null) {
String uniqueFile = getUniqueLocalTraceFileName();
String uniqueFile = LocalFileSpanReceiver.getUniqueLocalTraceFileName();
config.set(pathKey, uniqueFile);
if (LOG.isTraceEnabled()) {
LOG.trace("Set " + pathKey + " to " + uniqueFile);
@@ -191,7 +154,7 @@ public class SpanReceiverHost implements TraceAdminProtocol {
public synchronized SpanReceiverInfo[] listSpanReceivers()
throws IOException {
SpanReceiverInfo info[] = new SpanReceiverInfo[receivers.size()];
SpanReceiverInfo[] info = new SpanReceiverInfo[receivers.size()];
int i = 0;
for(Map.Entry<Long, SpanReceiver> entry : receivers.entrySet()) {
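
For context on the hunks above: HTrace 3.2.0-incubating ships the unique-trace-file helper in LocalFileSpanReceiver itself, which is why the local /proc/self/stat plus UUID fallback could be deleted and the call site switched to the static library method. Below is a minimal sketch of the new pattern; the configuration key is hypothetical, since the real key is built from confPrefix plus LOCAL_FILE_SPAN_RECEIVER_PATH_SUFFIX.
```
import org.apache.hadoop.conf.Configuration;
import org.apache.htrace.impl.LocalFileSpanReceiver;

public class UniqueTracePathSketch {
  public static void main(String[] args) {
    Configuration config = new Configuration();
    // Hypothetical key for illustration; SpanReceiverHost derives the real one
    // from confPrefix + LOCAL_FILE_SPAN_RECEIVER_PATH_SUFFIX.
    String pathKey = "example.htrace.local-file-span-receiver.path";
    if (config.get(pathKey) == null) {
      // The library now supplies the unique local file name directly.
      String uniqueFile = LocalFileSpanReceiver.getUniqueLocalTraceFileName();
      config.set(pathKey, uniqueFile);
    }
    System.out.println(pathKey + " = " + config.get(pathKey));
  }
}
```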

Tracing.md

@@ -78,6 +78,10 @@ You also need to add the jar bundling the SpanReceiver to the classpath of Hadoop
on each node. (LocalFileSpanReceiver in the example above is included in the
htrace-core jar, which is bundled with Hadoop.)
```
$ cp htrace-htraced/target/htrace-htraced-3.2.0-incubating.jar $HADOOP_HOME/share/hadoop/common/lib/
```
### Dynamic update of tracing configuration
You can use the `hadoop trace` command to see and update the tracing configuration of each server.
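
The command is served by the TraceAdminProtocol that SpanReceiverHost implements (see the SpanReceiverHost.java hunk above). As a hedged, in-process illustration of that same API, the sketch below loads and lists the receivers configured under the DFS server prefix; the SpanReceiverInfo accessors getId() and getClassName() are assumed from their use by the trace admin tooling.
```
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.tracing.SpanReceiverHost;
import org.apache.hadoop.tracing.SpanReceiverInfo;

public class ListConfiguredReceivers {
  public static void main(String[] args) throws Exception {
    // Load whatever span receivers are named under the DFS server htrace
    // prefix, mirroring what the NameNode/DataNode do at startup.
    SpanReceiverHost host = SpanReceiverHost.get(
        new HdfsConfiguration(), DFSConfigKeys.DFS_SERVER_HTRACE_PREFIX);
    // Prints nothing unless receivers are configured for that prefix.
    for (SpanReceiverInfo info : host.listSpanReceivers()) {
      System.out.println(info.getId() + "\t" + info.getClassName());
    }
  }
}
```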
@@ -149,6 +153,7 @@ which start tracing span before invoking HDFS shell command.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FsShell;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.tracing.SpanReceiverHost;
import org.apache.hadoop.util.ToolRunner;
import org.apache.htrace.Sampler;
@@ -157,19 +162,16 @@ which start tracing span before invoking HDFS shell command.
public class TracingFsShell {
public static void main(String argv[]) throws Exception {
Configuration conf = new Configuration();
Configuration conf = new HdfsConfiguration();
FsShell shell = new FsShell();
conf.setQuietMode(false);
shell.setConf(conf);
SpanReceiverHost.get(conf, DFSConfigKeys.DFS_SERVER_HTRACE_PREFIX);
int res = 0;
TraceScope ts = null;
try {
ts = Trace.startSpan("FsShell", Sampler.ALWAYS);
try (TraceScope ts = Trace.startSpan("FsShell", Sampler.ALWAYS)) {
res = ToolRunner.run(shell, argv);
} finally {
shell.close();
if (ts != null) ts.close();
}
System.exit(res);
}
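
Since the view above interleaves the removed and added lines, here is a reconstruction of how the example reads after this change; the org.apache.htrace.Trace and org.apache.htrace.TraceScope imports are assumed from the folded lines of the hunk.
```
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FsShell;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.tracing.SpanReceiverHost;
import org.apache.hadoop.util.ToolRunner;
import org.apache.htrace.Sampler;
import org.apache.htrace.Trace;
import org.apache.htrace.TraceScope;

public class TracingFsShell {
  public static void main(String argv[]) throws Exception {
    Configuration conf = new HdfsConfiguration();
    FsShell shell = new FsShell();
    conf.setQuietMode(false);
    shell.setConf(conf);
    SpanReceiverHost.get(conf, DFSConfigKeys.DFS_SERVER_HTRACE_PREFIX);
    int res = 0;
    try (TraceScope ts = Trace.startSpan("FsShell", Sampler.ALWAYS)) {
      // The TraceScope closes automatically, ending the "FsShell" span.
      res = ToolRunner.run(shell, argv);
    } finally {
      shell.close();
    }
    System.exit(res);
  }
}
```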

DFSClient.java

@@ -38,7 +38,6 @@ import java.net.Socket;
import java.net.SocketAddress;
import java.net.URI;
import java.net.UnknownHostException;
import java.nio.charset.Charset;
import java.security.GeneralSecurityException;
import java.util.ArrayList;
import java.util.Collections;
@@ -3197,35 +3196,26 @@ public class DFSClient implements java.io.Closeable, RemotePeerFactory,
return saslClient;
}
private static final byte[] PATH = "path".getBytes(Charset.forName("UTF-8"));
TraceScope getPathTraceScope(String description, String path) {
TraceScope scope = Trace.startSpan(description, traceSampler);
Span span = scope.getSpan();
if (span != null) {
if (path != null) {
span.addKVAnnotation(PATH,
path.getBytes(Charset.forName("UTF-8")));
span.addKVAnnotation("path", path);
}
}
return scope;
}
private static final byte[] SRC = "src".getBytes(Charset.forName("UTF-8"));
private static final byte[] DST = "dst".getBytes(Charset.forName("UTF-8"));
TraceScope getSrcDstTraceScope(String description, String src, String dst) {
TraceScope scope = Trace.startSpan(description, traceSampler);
Span span = scope.getSpan();
if (span != null) {
if (src != null) {
span.addKVAnnotation(SRC,
src.getBytes(Charset.forName("UTF-8")));
span.addKVAnnotation("src", src);
}
if (dst != null) {
span.addKVAnnotation(DST,
dst.getBytes(Charset.forName("UTF-8")));
span.addKVAnnotation("dst", dst);
}
}
return scope;

pom.xml

@@ -797,7 +797,7 @@
<dependency>
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
<version>3.1.0-incubating</version>
<version>3.2.0-incubating</version>
</dependency>
<dependency>
<groupId>org.jdom</groupId>