HADOOP-11894. Bump the version of Apache HTrace to 3.2.0-incubating (Masatake Iwasaki via Colin P. McCabe)
(cherry picked from commit a1140959da)

commit 4b108a441d
parent 2a56adc550
@@ -135,6 +135,9 @@ Release 2.8.0 - UNRELEASED
     HADOOP-12030. test-patch should only report on newly introduced
     findbugs warnings. (Sean Busbey via aw)
 
+    HADOOP-11894. Bump the version of Apache HTrace to 3.2.0-incubating
+    (Masatake Iwasaki via Colin P. McCabe)
+
   OPTIMIZATIONS
 
     HADOOP-11785. Reduce the number of listStatus operation in distcp
@@ -17,33 +17,23 @@
  */
 package org.apache.hadoop.tracing;
 
-import java.io.BufferedReader;
-import java.io.DataInputStream;
-import java.io.EOFException;
-import java.io.File;
-import java.io.FileInputStream;
 import java.io.IOException;
-import java.io.InputStreamReader;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
-import java.util.UUID;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.tracing.SpanReceiverInfo.ConfigurationPair;
-import org.apache.hadoop.tracing.TraceUtils;
-import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.ShutdownHookManager;
 import org.apache.htrace.SpanReceiver;
 import org.apache.htrace.SpanReceiverBuilder;
 import org.apache.htrace.Trace;
+import org.apache.htrace.impl.LocalFileSpanReceiver;
 
 /**
  * This class provides functions for reading the names of SpanReceivers from
@@ -54,7 +44,7 @@ import org.apache.htrace.Trace;
 @InterfaceAudience.Private
 public class SpanReceiverHost implements TraceAdminProtocol {
   public static final String SPAN_RECEIVERS_CONF_SUFFIX =
     "spanreceiver.classes";
   private static final Log LOG = LogFactory.getLog(SpanReceiverHost.class);
   private static final HashMap<String, SpanReceiverHost> hosts =
       new HashMap<String, SpanReceiverHost>(1);
@@ -88,33 +78,6 @@ public class SpanReceiverHost implements TraceAdminProtocol {
 
   private static List<ConfigurationPair> EMPTY = Collections.emptyList();
 
-  private static String getUniqueLocalTraceFileName() {
-    String tmp = System.getProperty("java.io.tmpdir", "/tmp");
-    String nonce = null;
-    BufferedReader reader = null;
-    try {
-      // On Linux we can get a unique local file name by reading the process id
-      // out of /proc/self/stat. (There isn't any portable way to get the
-      // process ID from Java.)
-      reader = new BufferedReader(
-          new InputStreamReader(new FileInputStream("/proc/self/stat"),
-              Charsets.UTF_8));
-      String line = reader.readLine();
-      if (line == null) {
-        throw new EOFException();
-      }
-      nonce = line.split(" ")[0];
-    } catch (IOException e) {
-    } finally {
-      IOUtils.cleanup(LOG, reader);
-    }
-    if (nonce == null) {
-      // If we can't use the process ID, use a random nonce.
-      nonce = UUID.randomUUID().toString();
-    }
-    return new File(tmp, nonce).getAbsolutePath();
-  }
-
   private SpanReceiverHost(String confPrefix) {
     this.confPrefix = confPrefix;
   }
@@ -143,7 +106,7 @@ public class SpanReceiverHost implements TraceAdminProtocol {
     // testing.
     String pathKey = confPrefix + LOCAL_FILE_SPAN_RECEIVER_PATH_SUFFIX;
     if (config.get(pathKey) == null) {
-      String uniqueFile = getUniqueLocalTraceFileName();
+      String uniqueFile = LocalFileSpanReceiver.getUniqueLocalTraceFileName();
       config.set(pathKey, uniqueFile);
       if (LOG.isTraceEnabled()) {
         LOG.trace("Set " + pathKey + " to " + uniqueFile);
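Note: the removed getUniqueLocalTraceFileName() above built a unique local trace-file path from the process id in /proc/self/stat, falling back to a random UUID; with HTrace 3.2.0-incubating the equivalent helper is provided by LocalFileSpanReceiver, which the call site now uses. A minimal sketch of calling it (the demo class and main method below are illustrative, not part of this patch):

```java
import org.apache.htrace.impl.LocalFileSpanReceiver;

public class TraceFileNameDemo {
  public static void main(String[] args) {
    // HTrace 3.2.0-incubating exposes the helper that SpanReceiverHost
    // previously implemented itself (see the removed method above).
    String uniqueFile = LocalFileSpanReceiver.getUniqueLocalTraceFileName();
    System.out.println(uniqueFile);
  }
}
```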
@@ -191,7 +154,7 @@ public class SpanReceiverHost implements TraceAdminProtocol {
 
   public synchronized SpanReceiverInfo[] listSpanReceivers()
       throws IOException {
-    SpanReceiverInfo info[] = new SpanReceiverInfo[receivers.size()];
+    SpanReceiverInfo[] info = new SpanReceiverInfo[receivers.size()];
     int i = 0;
 
     for(Map.Entry<Long, SpanReceiver> entry : receivers.entrySet()) {
@@ -78,6 +78,10 @@ You also need to add the jar bundling SpanReceiver to the classpath of Hadoop
 on each node. (LocalFileSpanReceiver in the example above is included in the
 jar of htrace-core which is bundled with Hadoop.)
 
+```
+$ cp htrace-htraced/target/htrace-htraced-3.2.0-incubating.jar $HADOOP_HOME/share/hadoop/common/lib/
+```
+
 ### Dynamic update of tracing configuration
 
 You can use `hadoop trace` command to see and update the tracing configuration of each servers.
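Note: SpanReceiverHost builds its configuration keys from a prefix plus the "spanreceiver.classes" suffix shown earlier in this diff. A hedged sketch of setting a receiver programmatically; the "hadoop.htrace." prefix used below is an assumption about the common-side key, not something this diff shows:

```java
import org.apache.hadoop.conf.Configuration;

public class SpanReceiverConfSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Assumed key: "hadoop.htrace." prefix + SPAN_RECEIVERS_CONF_SUFFIX
    // ("spanreceiver.classes", as defined in SpanReceiverHost above).
    conf.set("hadoop.htrace.spanreceiver.classes",
        "org.apache.htrace.impl.LocalFileSpanReceiver");
    System.out.println(conf.get("hadoop.htrace.spanreceiver.classes"));
  }
}
```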
@@ -149,6 +153,7 @@ which start tracing span before invoking HDFS shell command.
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FsShell;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.tracing.SpanReceiverHost;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.htrace.Sampler;
@@ -157,19 +162,16 @@ which start tracing span before invoking HDFS shell command.
 
 public class TracingFsShell {
   public static void main(String argv[]) throws Exception {
-    Configuration conf = new Configuration();
+    Configuration conf = new HdfsConfiguration();
     FsShell shell = new FsShell();
     conf.setQuietMode(false);
     shell.setConf(conf);
     SpanReceiverHost.get(conf, DFSConfigKeys.DFS_SERVER_HTRACE_PREFIX);
     int res = 0;
-    TraceScope ts = null;
-    try {
-      ts = Trace.startSpan("FsShell", Sampler.ALWAYS);
+    try (TraceScope ts = Trace.startSpan("FsShell", Sampler.ALWAYS)) {
       res = ToolRunner.run(shell, argv);
     } finally {
       shell.close();
-      if (ts != null) ts.close();
     }
     System.exit(res);
   }
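Note: the updated sample opens the TraceScope in a try-with-resources block, so the scope is closed automatically instead of through the explicit `if (ts != null) ts.close()` it replaces. A minimal sketch of the same pattern around an arbitrary operation (the doWork method is a placeholder, not Hadoop code):

```java
import org.apache.htrace.Sampler;
import org.apache.htrace.Trace;
import org.apache.htrace.TraceScope;

public class TraceScopeSketch {
  static int doWork() {
    return 42; // placeholder for the traced operation
  }

  public static void main(String[] args) {
    int res;
    // Same shape as the updated TracingFsShell example: the scope opened
    // with Sampler.ALWAYS is closed when the block exits.
    try (TraceScope scope = Trace.startSpan("doWork", Sampler.ALWAYS)) {
      res = doWork();
    }
    System.out.println(res);
  }
}
```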
@@ -38,7 +38,6 @@ import java.net.Socket;
 import java.net.SocketAddress;
 import java.net.URI;
 import java.net.UnknownHostException;
-import java.nio.charset.Charset;
 import java.security.GeneralSecurityException;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -3197,35 +3196,26 @@ public class DFSClient implements java.io.Closeable, RemotePeerFactory,
     return saslClient;
   }
 
-  private static final byte[] PATH = "path".getBytes(Charset.forName("UTF-8"));
-
   TraceScope getPathTraceScope(String description, String path) {
     TraceScope scope = Trace.startSpan(description, traceSampler);
     Span span = scope.getSpan();
     if (span != null) {
       if (path != null) {
-        span.addKVAnnotation(PATH,
-            path.getBytes(Charset.forName("UTF-8")));
+        span.addKVAnnotation("path", path);
       }
     }
     return scope;
   }
 
-  private static final byte[] SRC = "src".getBytes(Charset.forName("UTF-8"));
-
-  private static final byte[] DST = "dst".getBytes(Charset.forName("UTF-8"));
-
   TraceScope getSrcDstTraceScope(String description, String src, String dst) {
     TraceScope scope = Trace.startSpan(description, traceSampler);
     Span span = scope.getSpan();
     if (span != null) {
       if (src != null) {
-        span.addKVAnnotation(SRC,
-            src.getBytes(Charset.forName("UTF-8")));
+        span.addKVAnnotation("src", src);
       }
       if (dst != null) {
-        span.addKVAnnotation(DST,
-            dst.getBytes(Charset.forName("UTF-8")));
+        span.addKVAnnotation("dst", dst);
       }
     }
     return scope;
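Note: before this patch DFSClient annotated spans with byte[] keys and UTF-8 encoded values (hence the PATH/SRC/DST constants removed above); the updated calls pass Strings directly to Span.addKVAnnotation. A condensed sketch of the String form, mirroring getPathTraceScope but substituting Sampler.ALWAYS for the client's traceSampler field:

```java
import org.apache.htrace.Sampler;
import org.apache.htrace.Span;
import org.apache.htrace.Trace;
import org.apache.htrace.TraceScope;

public class PathAnnotationSketch {
  // Mirrors DFSClient#getPathTraceScope after this change: attach the path
  // as a key/value annotation only when a span is actually being collected.
  static TraceScope getPathTraceScope(String description, String path) {
    TraceScope scope = Trace.startSpan(description, Sampler.ALWAYS);
    Span span = scope.getSpan();
    if (span != null && path != null) {
      span.addKVAnnotation("path", path);
    }
    return scope;
  }

  public static void main(String[] args) {
    try (TraceScope scope = getPathTraceScope("open", "/tmp/example.txt")) {
      // traced work would go here
    }
  }
}
```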
@@ -797,7 +797,7 @@
     <dependency>
       <groupId>org.apache.htrace</groupId>
       <artifactId>htrace-core</artifactId>
-      <version>3.1.0-incubating</version>
+      <version>3.2.0-incubating</version>
     </dependency>
     <dependency>
       <groupId>org.jdom</groupId>