HADOOP-11498. Bump the version of HTrace to 3.1.0-incubating (Masatake Iwasaki via Colin P. McCabe)

(cherry picked from commit 09ad9a868a)
Colin Patrick McCabe 2015-01-30 16:01:59 -08:00
parent 3a01984bda
commit 80381b62aa
34 changed files with 219 additions and 202 deletions

View File

@@ -160,6 +160,9 @@ Release 2.7.0 - UNRELEASED
 
     HADOOP-9137. Support connection limiting in IPC server (kihwal)
 
+    HADOOP-11498. Bump the version of HTrace to 3.1.0-incubating (Masatake
+    Iwasaki via Colin P. McCabe)
+
   OPTIMIZATIONS
 
     HADOOP-11323. WritableComparator#compare keeps reference to byte array.

View File

@@ -240,7 +240,7 @@
     </dependency>
     <dependency>
-      <groupId>org.htrace</groupId>
+      <groupId>org.apache.htrace</groupId>
       <artifactId>htrace-core</artifactId>
     </dependency>
     <dependency>

View File

@@ -88,7 +88,7 @@ import org.apache.hadoop.util.ProtoUtil;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Time;
-import org.htrace.Trace;
+import org.apache.htrace.Trace;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;

View File

@@ -49,9 +49,8 @@ import org.apache.hadoop.security.token.SecretManager;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.util.ProtoUtil;
 import org.apache.hadoop.util.Time;
-import org.htrace.Sampler;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.protobuf.BlockingService;

View File

@@ -116,10 +116,10 @@ import org.apache.hadoop.util.ProtoUtil;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Time;
-import org.htrace.Span;
-import org.htrace.Trace;
-import org.htrace.TraceInfo;
-import org.htrace.TraceScope;
+import org.apache.htrace.Span;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceInfo;
+import org.apache.htrace.TraceScope;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.protobuf.ByteString;

View File

@@ -42,8 +42,8 @@ import org.apache.hadoop.util.Time;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.*;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
 
 /** An RpcEngine implementation for Writable data. */
 @InterfaceStability.Evolving

View File

@@ -25,7 +25,6 @@ import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.util.Collections;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
@@ -38,11 +37,12 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.tracing.SpanReceiverInfo.ConfigurationPair;
+import org.apache.hadoop.tracing.TraceUtils;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.ShutdownHookManager;
-import org.htrace.HTraceConfiguration;
-import org.htrace.SpanReceiver;
-import org.htrace.Trace;
+import org.apache.htrace.SpanReceiver;
+import org.apache.htrace.SpanReceiverBuilder;
+import org.apache.htrace.Trace;
 
 /**
  * This class provides functions for reading the names of SpanReceivers from
@@ -156,60 +156,13 @@ public class SpanReceiverHost implements TraceAdminProtocol {
 
   private synchronized SpanReceiver loadInstance(String className,
       List<ConfigurationPair> extraConfig) throws IOException {
-    className = className.trim();
-    if (!className.contains(".")) {
-      className = "org.htrace.impl." + className;
+    SpanReceiverBuilder builder =
+        new SpanReceiverBuilder(TraceUtils.wrapHadoopConf(config, extraConfig));
+    SpanReceiver rcvr = builder.spanReceiverClass(className.trim()).build();
+    if (rcvr == null) {
+      throw new IOException("Failed to load SpanReceiver " + className);
     }
-    Class<?> implClass = null;
-    SpanReceiver impl;
-    try {
-      implClass = Class.forName(className);
-      Object o = ReflectionUtils.newInstance(implClass, config);
-      impl = (SpanReceiver)o;
-      impl.configure(wrapHadoopConf(config, extraConfig));
-    } catch (ClassCastException e) {
-      throw new IOException("Class " + className +
-          " does not implement SpanReceiver.");
-    } catch (ClassNotFoundException e) {
-      throw new IOException("Class " + className + " cannot be found.");
-    } catch (SecurityException e) {
-      throw new IOException("Got SecurityException while loading " +
-          "SpanReceiver " + className);
-    } catch (IllegalArgumentException e) {
-      throw new IOException("Got IllegalArgumentException while loading " +
-          "SpanReceiver " + className, e);
-    } catch (RuntimeException e) {
-      throw new IOException("Got RuntimeException while loading " +
-          "SpanReceiver " + className, e);
-    }
-    return impl;
-  }
-
-  private static HTraceConfiguration wrapHadoopConf(final Configuration conf,
-      List<ConfigurationPair> extraConfig) {
-    final HashMap<String, String> extraMap = new HashMap<String, String>();
-    for (ConfigurationPair pair : extraConfig) {
-      extraMap.put(pair.getKey(), pair.getValue());
-    }
-    return new HTraceConfiguration() {
-      public static final String HTRACE_CONF_PREFIX = "hadoop.htrace.";
-
-      @Override
-      public String get(String key) {
-        if (extraMap.containsKey(key)) {
-          return extraMap.get(key);
-        }
-        return conf.get(HTRACE_CONF_PREFIX + key);
-      }
-
-      @Override
-      public String get(String key, String defaultValue) {
-        if (extraMap.containsKey(key)) {
-          return extraMap.get(key);
-        }
-        return conf.get(HTRACE_CONF_PREFIX + key, defaultValue);
-      }
-    };
+    return rcvr;
   }
 
   /**

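Note: the hunk above is the heart of the change on the common side. The hand-rolled reflection and inline configuration wrapper are replaced by HTrace 3.1.0's SpanReceiverBuilder, which resolves the class name, instantiates the receiver, and hands it the wrapped configuration through its constructor. A minimal sketch of the new loading path, using only the API visible in this hunk (the class and method names of the sketch itself are illustrative):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.tracing.TraceUtils;
import org.apache.htrace.SpanReceiver;
import org.apache.htrace.SpanReceiverBuilder;

public class ReceiverLoaderSketch {
  // Illustrative stand-in for SpanReceiverHost#loadInstance, using the
  // one-argument wrapHadoopConf variant for brevity.
  static SpanReceiver load(Configuration conf, String className)
      throws IOException {
    SpanReceiverBuilder builder =
        new SpanReceiverBuilder(TraceUtils.wrapHadoopConf(conf));
    SpanReceiver receiver = builder.spanReceiverClass(className.trim()).build();
    if (receiver == null) {
      // build() signals failure by returning null rather than throwing,
      // so the caller converts that to an exception itself.
      throw new IOException("Failed to load SpanReceiver " + className);
    }
    return receiver;
  }
}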
View File

@@ -1,53 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.tracing;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
-import org.htrace.Sampler;
-import org.htrace.impl.ProbabilitySampler;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-@InterfaceAudience.Private
-public class TraceSamplerFactory {
-  private static final Logger LOG =
-      LoggerFactory.getLogger(TraceSamplerFactory.class);
-
-  public static Sampler<?> createSampler(Configuration conf) {
-    String samplerStr = conf.get(CommonConfigurationKeys.HADOOP_TRACE_SAMPLER,
-        CommonConfigurationKeys.HADOOP_TRACE_SAMPLER_DEFAULT);
-    if (samplerStr.equals("NeverSampler")) {
-      LOG.debug("HTrace is OFF for all spans.");
-      return Sampler.NEVER;
-    } else if (samplerStr.equals("AlwaysSampler")) {
-      LOG.info("HTrace is ON for all spans.");
-      return Sampler.ALWAYS;
-    } else if (samplerStr.equals("ProbabilitySampler")) {
-      double percentage =
-          conf.getDouble("htrace.probability.sampler.percentage", 0.01d);
-      LOG.info("HTrace is ON for " + percentage + "% of top-level spans.");
-      return new ProbabilitySampler(percentage / 100.0d);
-    } else {
-      throw new RuntimeException("Can't create sampler " + samplerStr +
-          ". Available samplers are NeverSampler, AlwaysSampler, " +
-          "and ProbabilitySampler.");
-    }
-  }
-}

View File

@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.tracing;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.tracing.SpanReceiverInfo.ConfigurationPair;
+import org.apache.htrace.HTraceConfiguration;
+
+/**
+ * This class provides utility functions for tracing.
+ */
+@InterfaceAudience.Private
+public class TraceUtils {
+  public static final String HTRACE_CONF_PREFIX = "hadoop.htrace.";
+  private static List<ConfigurationPair> EMPTY = Collections.emptyList();
+
+  public static HTraceConfiguration wrapHadoopConf(final Configuration conf) {
+    return wrapHadoopConf(conf, EMPTY);
+  }
+
+  public static HTraceConfiguration wrapHadoopConf(final Configuration conf,
+      List<ConfigurationPair> extraConfig) {
+    final HashMap<String, String> extraMap = new HashMap<String, String>();
+    for (ConfigurationPair pair : extraConfig) {
+      extraMap.put(pair.getKey(), pair.getValue());
+    }
+    return new HTraceConfiguration() {
+      @Override
+      public String get(String key) {
+        if (extraMap.containsKey(key)) {
+          return extraMap.get(key);
+        }
+        return conf.get(HTRACE_CONF_PREFIX + key, "");
+      }
+
+      @Override
+      public String get(String key, String defaultValue) {
+        if (extraMap.containsKey(key)) {
+          return extraMap.get(key);
+        }
+        return conf.get(HTRACE_CONF_PREFIX + key, defaultValue);
+      }
+    };
+  }
+}

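Note: the new TraceUtils class extracts the configuration wrapper formerly private to SpanReceiverHost so it can also feed SamplerBuilder and SpanReceiverBuilder. A quick illustration of what it does: HTrace components ask for bare keys, and the wrapper resolves them against the "hadoop.htrace."-prefixed Hadoop configuration (the key and value below are illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.tracing.TraceUtils;
import org.apache.htrace.HTraceConfiguration;

public class TraceUtilsDemo {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    conf.set("hadoop.htrace.spanreceiver.classes",
        "org.apache.htrace.impl.LocalFileSpanReceiver");
    HTraceConfiguration wrapped = TraceUtils.wrapHadoopConf(conf);
    // Prints the value set above: the bare HTrace key is resolved
    // by prepending the "hadoop.htrace." prefix.
    System.out.println(wrapped.get("spanreceiver.classes"));
  }
}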
View File

@@ -27,8 +27,8 @@ import org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformation
 import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.*;
 import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.htrace.Span;
-import org.htrace.Trace;
+import org.apache.htrace.Span;
+import org.apache.htrace.Trace;
 
 import com.google.protobuf.ByteString;

View File

@@ -60,7 +60,7 @@ public void receiveSpan(Span span);
 +----
   <property>
     <name>hadoop.htrace.spanreceiver.classes</name>
-    <value>org.htrace.impl.LocalFileSpanReceiver</value>
+    <value>org.apache.htrace.impl.LocalFileSpanReceiver</value>
   </property>
   <property>
     <name>hadoop.htrace.local-file-span-receiver.path</name>
@@ -131,11 +131,11 @@ public void receiveSpan(Span span);
 +----
   $ hadoop trace -list -host 192.168.56.2:9000
   ID  CLASS
-  1   org.htrace.impl.LocalFileSpanReceiver
+  1   org.apache.htrace.impl.LocalFileSpanReceiver
 
   $ hadoop trace -list -host 192.168.56.2:50020
   ID  CLASS
-  1   org.htrace.impl.LocalFileSpanReceiver
+  1   org.apache.htrace.impl.LocalFileSpanReceiver
 +----
 
   <<<hadoop trace -remove>>> removes span receiver from server.
@@ -156,7 +156,7 @@ public void receiveSpan(Span span);
   $ hadoop trace -list -host 192.168.56.2:9000
   ID  CLASS
-  2   org.htrace.impl.LocalFileSpanReceiver
+  2   org.apache.htrace.impl.LocalFileSpanReceiver
 +----
@@ -172,9 +172,9 @@ public void receiveSpan(Span span);
 +----
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.tracing.SpanReceiverHost;
-import org.htrace.Sampler;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
+import org.apache.htrace.Sampler;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
 
 ...
@@ -200,9 +200,9 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FsShell;
 import org.apache.hadoop.tracing.SpanReceiverHost;
 import org.apache.hadoop.util.ToolRunner;
-import org.htrace.Sampler;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
+import org.apache.htrace.Sampler;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
 
 public class TracingFsShell {
   public static void main(String argv[]) throws Exception {

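Note: the documentation samples above elide the traced body with "...". The usual shape of that elided code, under the renamed packages, is a try/finally around a TraceScope; a sketch using the standard HTrace 3.x API (the span name is arbitrary):

import org.apache.htrace.Sampler;
import org.apache.htrace.Trace;
import org.apache.htrace.TraceScope;

...
    TraceScope ts = Trace.startSpan("Gets", Sampler.ALWAYS);
    try {
      // traced work, e.g. a DFS read
    } finally {
      ts.close();
    }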
View File

@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.tracing;
+
+import static org.junit.Assert.assertEquals;
+import java.util.LinkedList;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.tracing.SpanReceiverInfo.ConfigurationPair;
+import org.apache.htrace.HTraceConfiguration;
+import org.junit.Test;
+
+public class TestTraceUtils {
+  @Test
+  public void testWrappedHadoopConf() {
+    String key = "sampler";
+    String value = "ProbabilitySampler";
+    Configuration conf = new Configuration();
+    conf.set(TraceUtils.HTRACE_CONF_PREFIX + key, value);
+    HTraceConfiguration wrapped = TraceUtils.wrapHadoopConf(conf);
+    assertEquals(value, wrapped.get(key));
+  }
+
+  @Test
+  public void testExtraConfig() {
+    String key = "test.extra.config";
+    String oldValue = "old value";
+    String newValue = "new value";
+    Configuration conf = new Configuration();
+    conf.set(TraceUtils.HTRACE_CONF_PREFIX + key, oldValue);
+    LinkedList<ConfigurationPair> extraConfig =
+        new LinkedList<ConfigurationPair>();
+    extraConfig.add(new ConfigurationPair(key, newValue));
+    HTraceConfiguration wrapped = TraceUtils.wrapHadoopConf(conf, extraConfig);
+    assertEquals(newValue, wrapped.get(key));
+  }
+}

View File

@@ -181,7 +181,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
       <scope>compile</scope>
     </dependency>
    <dependency>
-      <groupId>org.htrace</groupId>
+      <groupId>org.apache.htrace</groupId>
      <artifactId>htrace-core</artifactId>
    </dependency>
    <dependency>

View File

@@ -33,12 +33,12 @@ import org.apache.hadoop.hdfs.shortcircuit.ClientMmap;
 import org.apache.hadoop.hdfs.shortcircuit.ShortCircuitReplica;
 import org.apache.hadoop.util.DirectBufferPool;
 import org.apache.hadoop.util.DataChecksum;
+import org.apache.htrace.Sampler;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
-import org.htrace.Sampler;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
 
 /**
  * BlockReaderLocal enables local short circuited reads. If the DFS client is on

View File

@@ -46,9 +46,9 @@ import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.DataChecksum;
-import org.htrace.Sampler;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
+import org.apache.htrace.Sampler;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
 
 /**
  * BlockReaderLocalLegacy enables local short circuited reads. If the DFS client is on

View File

@@ -48,13 +48,13 @@ import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
 import org.apache.hadoop.hdfs.security.token.block.InvalidBlockTokenException;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.security.token.Token;
+import org.apache.htrace.Sampler;
+import org.apache.htrace.Span;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
 
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
-import org.htrace.Sampler;
-import org.htrace.Span;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
 
 @InterfaceAudience.Private
 @InterfaceStability.Unstable

View File

@@ -213,16 +213,17 @@ import org.apache.hadoop.security.token.SecretManager.InvalidToken;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenRenewer;
 import org.apache.hadoop.tracing.SpanReceiverHost;
-import org.apache.hadoop.tracing.TraceSamplerFactory;
+import org.apache.hadoop.tracing.TraceUtils;
 import org.apache.hadoop.util.Daemon;
 import org.apache.hadoop.util.DataChecksum;
 import org.apache.hadoop.util.DataChecksum.Type;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.Time;
-import org.htrace.Sampler;
-import org.htrace.Span;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
+import org.apache.htrace.Sampler;
+import org.apache.htrace.SamplerBuilder;
+import org.apache.htrace.Span;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Joiner;
@@ -627,7 +628,7 @@ public class DFSClient implements java.io.Closeable, RemotePeerFactory,
       Configuration conf, FileSystem.Statistics stats)
     throws IOException {
     SpanReceiverHost.getInstance(conf);
-    traceSampler = TraceSamplerFactory.createSampler(conf);
+    traceSampler = new SamplerBuilder(TraceUtils.wrapHadoopConf(conf)).build();
     // Copy only the required DFSClient configuration
     this.dfsClientConf = new Conf(conf);
     if (this.dfsClientConf.useLegacyBlockReaderLocal) {

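Note: the SamplerBuilder line above is what replaces the deleted TraceSamplerFactory. Sampler selection now happens on the HTrace side: the builder reads the bare "sampler" key through the wrapped configuration, which TraceUtils maps to "hadoop.htrace.sampler" (the mapping the TestTraceUtils case earlier in this commit exercises). A sketch, assuming SamplerBuilder's short-name resolution of the standard samplers:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.tracing.TraceUtils;
import org.apache.htrace.Sampler;
import org.apache.htrace.SamplerBuilder;

public class SamplerSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Resolved by SamplerBuilder as the bare key "sampler".
    conf.set("hadoop.htrace.sampler", "AlwaysSampler");
    Sampler<?> sampler =
        new SamplerBuilder(TraceUtils.wrapHadoopConf(conf)).build();
    System.out.println(sampler); // samples every top-level span
  }
}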
View File

@@ -26,9 +26,9 @@ import org.apache.hadoop.hdfs.inotify.EventBatchList;
 import org.apache.hadoop.hdfs.inotify.MissingEventsException;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.util.Time;
-import org.htrace.Sampler;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
+import org.apache.htrace.Sampler;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

View File

@@ -73,11 +73,11 @@ import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.token.SecretManager.InvalidToken;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.IdentityHashStore;
+import org.apache.htrace.Span;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
 
 import com.google.common.annotations.VisibleForTesting;
-import org.htrace.Span;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
 
 /****************************************************************
  * DFSInputStream provides bytes from a named file. It handles

View File

@@ -94,9 +94,9 @@ import org.apache.hadoop.util.DataChecksum;
 import org.apache.hadoop.util.DataChecksum.Type;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.Time;
-import org.htrace.Span;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
+import org.apache.htrace.Span;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;

View File

@@ -46,9 +46,9 @@ import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.DataChecksum;
-import org.htrace.Sampler;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
+import org.apache.htrace.Sampler;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
 
 /**

View File

@@ -51,11 +51,11 @@ import org.apache.hadoop.hdfs.shortcircuit.ClientMmap;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.DataChecksum;
+import org.apache.htrace.Sampler;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
 
 import com.google.common.annotations.VisibleForTesting;
-import org.htrace.Sampler;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
 
 /**
  * This is a wrapper around connection to datanode

View File

@@ -25,11 +25,11 @@ import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.BatchedRemoteIterator;
 import org.apache.hadoop.fs.InvalidRequestException;
 import org.apache.hadoop.ipc.RemoteException;
+import org.apache.htrace.Sampler;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
 
 import com.google.common.base.Preconditions;
-import org.htrace.Sampler;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
 
 /**
  * CacheDirectiveIterator is a remote iterator that iterates cache directives.

View File

@@ -23,9 +23,9 @@ import java.io.IOException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.BatchedRemoteIterator;
-import org.htrace.Sampler;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
+import org.apache.htrace.Sampler;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
 
 /**
  * CachePoolIterator is a remote iterator that iterates cache pools.

View File

@@ -23,9 +23,9 @@ import java.io.IOException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.BatchedRemoteIterator;
-import org.htrace.Sampler;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
+import org.apache.htrace.Sampler;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
 
 /**
  * EncryptionZoneIterator is a remote iterator that iterates over encryption

View File

@@ -31,10 +31,10 @@ import org.apache.hadoop.hdfs.protocolPB.PBHelper;
 import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.DataChecksum;
-import org.htrace.Span;
-import org.htrace.Trace;
-import org.htrace.TraceInfo;
-import org.htrace.TraceScope;
+import org.apache.htrace.Span;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceInfo;
+import org.apache.htrace.TraceScope;
 
 /**
  * Static utilities for dealing with the protocol buffers used by the

View File

@@ -40,7 +40,7 @@ import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ShortCircuitShmR
 import org.apache.hadoop.hdfs.protocolPB.PBHelper;
 import org.apache.hadoop.hdfs.server.datanode.CachingStrategy;
 import org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm.SlotId;
-import org.htrace.TraceScope;
+import org.apache.htrace.TraceScope;
 
 /** Receiver */
 @InterfaceAudience.Private

View File

@@ -48,8 +48,8 @@ import org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm.SlotId;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.DataChecksum;
 
-import org.htrace.Trace;
-import org.htrace.Span;
+import org.apache.htrace.Trace;
+import org.apache.htrace.Span;
 
 import com.google.protobuf.Message;

View File

@@ -46,12 +46,12 @@ import org.apache.hadoop.io.ReadaheadPool.ReadaheadRequest;
 import org.apache.hadoop.io.nativeio.NativeIO;
 import org.apache.hadoop.net.SocketOutputStream;
 import org.apache.hadoop.util.DataChecksum;
+import org.apache.htrace.Sampler;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
-import org.htrace.Sampler;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
 
 /**
  * Reads a block from the disk and sends it to a recipient.

View File

@@ -88,7 +88,7 @@ import org.apache.hadoop.util.GSet;
 import org.apache.log4j.Level;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
-import org.htrace.Sampler;
+import org.apache.htrace.Sampler;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;

View File

@@ -72,12 +72,12 @@ public class TestTraceAdmin {
     Assert.assertEquals("ret:0, Added trace span receiver 1 with " +
         "configuration local-file-span-receiver.path = " + tracePath + NEWLINE,
         runTraceCommand(trace, "-add", "-host", getHostPortForNN(cluster),
-            "-class", "org.htrace.impl.LocalFileSpanReceiver",
+            "-class", "org.apache.htrace.impl.LocalFileSpanReceiver",
             "-Clocal-file-span-receiver.path=" + tracePath));
     String list =
         runTraceCommand(trace, "-list", "-host", getHostPortForNN(cluster));
     Assert.assertTrue(list.startsWith("ret:0"));
-    Assert.assertTrue(list.contains("1 org.htrace.impl.LocalFileSpanReceiver"));
+    Assert.assertTrue(list.contains("1 org.apache.htrace.impl.LocalFileSpanReceiver"));
     Assert.assertEquals("ret:0, Removed trace span receiver 1" + NEWLINE,
         runTraceCommand(trace, "-remove", "1", "-host",
             getHostPortForNN(cluster)));

View File

@@ -25,12 +25,12 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.htrace.HTraceConfiguration;
-import org.htrace.Sampler;
-import org.htrace.Span;
-import org.htrace.SpanReceiver;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
+import org.apache.htrace.HTraceConfiguration;
+import org.apache.htrace.Sampler;
+import org.apache.htrace.Span;
+import org.apache.htrace.SpanReceiver;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.Before;
@@ -101,8 +101,6 @@ public class TestTracing {
     Assert.assertNotNull(s);
     long spanStart = s.getStartTimeMillis();
     long spanEnd = s.getStopTimeMillis();
-    Assert.assertTrue(spanStart - startTime < 100);
-    Assert.assertTrue(spanEnd - endTime < 100);
 
     // There should only be one trace id as it should all be homed in the
     // top trace.
@@ -272,7 +270,7 @@ public class TestTracing {
    */
   public static class SetSpanReceiver implements SpanReceiver {
 
-    public void configure(HTraceConfiguration conf) {
+    public SetSpanReceiver(HTraceConfiguration conf) {
     }
 
     public void receiveSpan(Span span) {

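Note: the last hunk above reflects an HTrace 3.1 API change worth calling out: span receivers are no longer configured through a configure(HTraceConfiguration) method; SpanReceiverBuilder instantiates them via a constructor that takes the HTraceConfiguration. A minimal receiver under the new contract might look like this (illustrative class; the close() signature assumes SpanReceiver extends java.io.Closeable in 3.1):

import java.io.IOException;
import org.apache.htrace.HTraceConfiguration;
import org.apache.htrace.Span;
import org.apache.htrace.SpanReceiver;

public class StdoutSpanReceiver implements SpanReceiver {
  // HTrace 3.1 passes configuration at construction time.
  public StdoutSpanReceiver(HTraceConfiguration conf) {
  }

  @Override
  public void receiveSpan(Span span) {
    System.out.println("span: " + span.getDescription());
  }

  @Override
  public void close() throws IOException {
  }
}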
View File

@@ -30,10 +30,10 @@ import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.net.unix.DomainSocket;
 import org.apache.hadoop.net.unix.TemporarySocketDirectory;
 import org.apache.hadoop.util.NativeCodeLoader;
-import org.htrace.Sampler;
-import org.htrace.Span;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
+import org.apache.htrace.Sampler;
+import org.apache.htrace.Span;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.BeforeClass;

View File

@@ -782,9 +782,9 @@
         <version>0.1.42</version>
       </dependency>
       <dependency>
-        <groupId>org.htrace</groupId>
+        <groupId>org.apache.htrace</groupId>
         <artifactId>htrace-core</artifactId>
-        <version>3.0.4</version>
+        <version>3.1.0-incubating</version>
       </dependency>
       <dependency>
         <groupId>org.jdom</groupId>