HBASE-12810 Update to htrace-incubating

Nick Dimiduk 2015-01-22 14:32:21 -08:00
parent 44fa36b425
commit 05f4e0c715
30 changed files with 87 additions and 112 deletions


@@ -133,7 +133,7 @@
<artifactId>zookeeper</artifactId>
</dependency>
<dependency>
-<groupId>org.htrace</groupId>
+<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</dependency>
<dependency>


@@ -52,7 +52,7 @@ import org.apache.hadoop.hbase.client.coprocessor.Batch;
import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-import org.htrace.Trace;
+import org.apache.htrace.Trace;
import com.google.common.annotations.VisibleForTesting;


@@ -25,7 +25,7 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.htrace.Trace;
+import org.apache.htrace.Trace;
/**
* A completion service for the RpcRetryingCallerFactory.


@@ -60,9 +60,9 @@ import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.security.token.TokenSelector;
-import org.htrace.Span;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
+import org.apache.htrace.Span;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
import javax.net.SocketFactory;
import javax.security.sasl.SaslException;


@@ -45,8 +45,8 @@ import org.apache.zookeeper.data.ACL;
import org.apache.zookeeper.data.Stat;
import org.apache.zookeeper.proto.CreateRequest;
import org.apache.zookeeper.proto.SetDataRequest;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
/**
* A zookeeper that can handle 'recoverable' errors.


@@ -226,7 +226,7 @@
</dependency>
<!-- tracing Dependencies -->
<dependency>
-<groupId>org.htrace</groupId>
+<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</dependency>
</dependencies>


@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.trace;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.htrace.HTraceConfiguration;
+import org.apache.htrace.HTraceConfiguration;
@InterfaceAudience.Private
public class HBaseHTraceConfiguration extends HTraceConfiguration {


@@ -25,8 +25,9 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.htrace.SpanReceiver;
-import org.htrace.Trace;
+import org.apache.htrace.SpanReceiver;
+import org.apache.htrace.SpanReceiverBuilder;
+import org.apache.htrace.Trace;
/**
* This class provides functions for reading the names of SpanReceivers from
@@ -67,62 +68,31 @@ public class SpanReceiverHost {
}
/**
-* Reads the names of classes specified in the
-* "hbase.trace.spanreceiver.classes" property and instantiates and registers
-* them with the Tracer as SpanReceiver's.
+* Reads the names of classes specified in the {@code hbase.trace.spanreceiver.classes} property
+* and instantiates and registers them with the Tracer.
*
*/
public void loadSpanReceivers() {
-Class<?> implClass = null;
String[] receiverNames = conf.getStrings(SPAN_RECEIVERS_CONF_KEY);
if (receiverNames == null || receiverNames.length == 0) {
return;
}
+SpanReceiverBuilder builder = new SpanReceiverBuilder(new HBaseHTraceConfiguration(conf));
for (String className : receiverNames) {
className = className.trim();
-try {
-implClass = Class.forName(className);
-SpanReceiver receiver = loadInstance(implClass);
+SpanReceiver receiver = builder.spanReceiverClass(className).build();
if (receiver != null) {
receivers.add(receiver);
LOG.info("SpanReceiver " + className + " was loaded successfully.");
}
-} catch (ClassNotFoundException e) {
-LOG.warn("Class " + className + " cannot be found. " + e.getMessage());
-} catch (IOException e) {
-LOG.warn("Load SpanReceiver " + className + " failed. "
-+ e.getMessage());
-}
}
for (SpanReceiver rcvr : receivers) {
Trace.addReceiver(rcvr);
}
}
-private SpanReceiver loadInstance(Class<?> implClass)
-throws IOException {
-SpanReceiver impl = null;
-try {
-Object o = implClass.newInstance();
-impl = (SpanReceiver)o;
-impl.configure(new HBaseHTraceConfiguration(this.conf));
-} catch (SecurityException e) {
-throw new IOException(e);
-} catch (IllegalArgumentException e) {
-throw new IOException(e);
-} catch (RuntimeException e) {
-throw new IOException(e);
-} catch (InstantiationException e) {
-e.printStackTrace();
-} catch (IllegalAccessException e) {
-e.printStackTrace();
-}
-return impl;
-}
/**
* Calls close() on all SpanReceivers created by this SpanReceiverHost.
*/

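The hunk above replaces SpanReceiverHost's hand-rolled reflection (Class.forName, newInstance, configure) with htrace's SpanReceiverBuilder, which encapsulates instantiation and configuration and returns null on failure instead of throwing. A minimal sketch of that pattern, assuming a standalone main and the LocalFileSpanReceiver class name purely for illustration:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.trace.HBaseHTraceConfiguration;
import org.apache.htrace.SpanReceiver;
import org.apache.htrace.SpanReceiverBuilder;
import org.apache.htrace.Trace;

public class ReceiverLoadingSketch {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // Wrap the Hadoop Configuration so htrace resolves its keys under the "hbase." prefix.
    SpanReceiverBuilder builder =
        new SpanReceiverBuilder(new HBaseHTraceConfiguration(conf));
    // build() logs and returns null if the class cannot be loaded or configured,
    // which is why the rewritten loadSpanReceivers() needs no try/catch.
    SpanReceiver receiver = builder
        .spanReceiverClass("org.apache.htrace.impl.LocalFileSpanReceiver")
        .build();
    if (receiver != null) {
      Trace.addReceiver(receiver);
    }
  }
}
```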

@@ -202,7 +202,7 @@
<artifactId>commons-lang</artifactId>
</dependency>
<dependency>
-<groupId>org.htrace</groupId>
+<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</dependency>
<dependency>


@@ -65,10 +65,10 @@ import org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException;
import org.apache.hadoop.hbase.security.AccessDeniedException;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.LoadTestTool;
-import org.htrace.Span;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
-import org.htrace.impl.AlwaysSampler;
+import org.apache.htrace.Span;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
+import org.apache.htrace.impl.AlwaysSampler;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;


@@ -35,9 +35,9 @@ import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.AbstractHBaseTool;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.util.ToolRunner;
-import org.htrace.Sampler;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
+import org.apache.htrace.Sampler;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
import org.junit.Test;
import org.junit.experimental.categories.Category;

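Hunks like this one are mechanical renames: only the package moves from org.htrace to org.apache.htrace, and the tracing calls themselves are unchanged. A minimal sketch of the relocated API in use, with the operation name and try/finally wrapper assumed for illustration:

```java
import org.apache.htrace.Sampler;
import org.apache.htrace.Trace;
import org.apache.htrace.TraceScope;

public class SpanSketch {
  public static void main(String[] args) {
    // Start a span unconditionally; Sampler.NEVER or a ProbabilitySampler
    // would make the decision conditional.
    TraceScope scope = Trace.startSpan("example-operation", Sampler.ALWAYS);
    try {
      // ... traced work happens here ...
    } finally {
      scope.close(); // closes the span and detaches it from the current thread
    }
  }
}
```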

@@ -447,7 +447,7 @@
</dependency>
<!-- tracing Dependencies -->
<dependency>
-<groupId>org.htrace</groupId>
+<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</dependency>
<dependency>


@@ -25,9 +25,9 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.Server;
-import org.htrace.Span;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
+import org.apache.htrace.Span;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
/**
* Abstract base class for all HBase event handlers. Subclasses should


@@ -43,8 +43,8 @@ import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.IdLock;
import org.apache.hadoop.io.WritableUtils;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
import com.google.common.annotations.VisibleForTesting;


@@ -27,8 +27,8 @@ import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.StringUtils;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
import com.google.protobuf.Message;


@@ -26,7 +26,7 @@ import org.apache.hadoop.hbase.security.User;
import com.google.protobuf.BlockingService;
import org.apache.hadoop.hbase.util.Bytes;
-import org.htrace.Trace;
+import org.apache.htrace.Trace;
import java.net.InetAddress;


@@ -119,7 +119,7 @@ import org.apache.hadoop.security.token.SecretManager.InvalidToken;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.util.StringUtils;
import org.codehaus.jackson.map.ObjectMapper;
-import org.htrace.TraceInfo;
+import org.apache.htrace.TraceInfo;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.google.protobuf.BlockingService;


@@ -729,7 +729,7 @@ public class TableMapReduceUtil {
io.netty.channel.Channel.class,
com.google.protobuf.Message.class,
com.google.common.collect.Lists.class,
-org.htrace.Trace.class);
+org.apache.htrace.Trace.class);
}
/**


@@ -44,7 +44,7 @@ import org.apache.hadoop.hbase.master.TableLockManager;
import org.apache.hadoop.hbase.master.RegionState.State;
import org.apache.hadoop.hbase.master.TableLockManager.TableLock;
import org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos;
-import org.htrace.Trace;
+import org.apache.htrace.Trace;
/**
* Handler to run disable of a table.


@@ -50,8 +50,8 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.HasThread;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
import org.apache.hadoop.hbase.util.Counter;
import com.google.common.base.Preconditions;


@@ -85,10 +85,10 @@ import org.apache.hadoop.hbase.wal.WALProvider.Writer;
import org.apache.hadoop.hbase.wal.WALSplitter;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.util.StringUtils;
-import org.htrace.NullScope;
-import org.htrace.Span;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
+import org.apache.htrace.NullScope;
+import org.apache.htrace.Span;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Maps;


@@ -19,7 +19,7 @@
package org.apache.hadoop.hbase.regionserver.wal;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.htrace.Span;
+import org.apache.htrace.Span;
import com.lmax.disruptor.EventFactory;


@@ -21,7 +21,7 @@ import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.htrace.Span;
+import org.apache.htrace.Span;
/**
* A Future on a filesystem sync call. It given to a client or 'Handler' for it to wait on till


@@ -77,6 +77,7 @@ import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.RandomDistribution;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.regionserver.BloomType;
+import org.apache.hadoop.hbase.trace.HBaseHTraceConfiguration;
import org.apache.hadoop.hbase.trace.SpanReceiverHost;
import org.apache.hadoop.hbase.util.*;
import org.apache.hadoop.io.LongWritable;
@@ -94,10 +95,10 @@ import com.yammer.metrics.core.Histogram;
import com.yammer.metrics.stats.UniformSample;
import com.yammer.metrics.stats.Snapshot;
-import org.htrace.Sampler;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
-import org.htrace.impl.ProbabilitySampler;
+import org.apache.htrace.Sampler;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
+import org.apache.htrace.impl.ProbabilitySampler;
/**
* Script used evaluating HBase performance and scalability. Runs a HBase
@@ -921,7 +922,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
*/
Test(final Connection con, final TestOptions options, final Status status) {
this.connection = con;
-this.conf = con.getConfiguration();
+this.conf = con == null ? HBaseConfiguration.create() : this.connection.getConfiguration();
this.opts = options;
this.status = status;
this.testName = this.getClass().getSimpleName();
@@ -929,7 +930,8 @@ public class PerformanceEvaluation extends Configured implements Tool {
if (options.traceRate >= 1.0) {
this.traceSampler = Sampler.ALWAYS;
} else if (options.traceRate > 0.0) {
-this.traceSampler = new ProbabilitySampler(options.traceRate);
+conf.setDouble("hbase.sampler.fraction", options.traceRate);
+this.traceSampler = new ProbabilitySampler(new HBaseHTraceConfiguration(conf));
} else {
this.traceSampler = Sampler.NEVER;
}

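In htrace 3.1.0-incubating, ProbabilitySampler no longer takes a bare double; it reads its sampling rate from an HTraceConfiguration, which is why the hunk above first writes hbase.sampler.fraction into the Hadoop Configuration (HBaseHTraceConfiguration resolves htrace keys under the hbase. prefix). A minimal standalone sketch of the same construction, with the 0.01 rate chosen for illustration:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.trace.HBaseHTraceConfiguration;
import org.apache.htrace.impl.ProbabilitySampler;

public class SamplerSketch {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // "hbase." prefix + "sampler.fraction": the key the sampler reads once unwrapped.
    conf.setDouble("hbase.sampler.fraction", 0.01); // trace roughly 1% of operations
    ProbabilitySampler sampler =
        new ProbabilitySampler(new HBaseHTraceConfiguration(conf));
  }
}
```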

@@ -29,12 +29,12 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Waiter;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
-import org.htrace.Sampler;
-import org.htrace.Span;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
-import org.htrace.TraceTree;
-import org.htrace.impl.POJOSpanReceiver;
+import org.apache.htrace.Sampler;
+import org.apache.htrace.Span;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
+import org.apache.htrace.TraceTree;
+import org.apache.htrace.impl.POJOSpanReceiver;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
@@ -47,11 +47,12 @@ public class TestHTraceHooks {
private static final byte[] FAMILY_BYTES = "family".getBytes();
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
-private static final POJOSpanReceiver rcvr = new POJOSpanReceiver();
+private static POJOSpanReceiver rcvr;
@BeforeClass
public static void before() throws Exception {
TEST_UTIL.startMiniCluster(2, 3);
+rcvr = new POJOSpanReceiver(new HBaseHTraceConfiguration(TEST_UTIL.getConfiguration()));
Trace.addReceiver(rcvr);
}
@@ -59,6 +60,7 @@
public static void after() throws Exception {
TEST_UTIL.shutdownMiniCluster();
Trace.removeReceiver(rcvr);
+rcvr = null;
}
@Test
@@ -84,25 +86,23 @@
Collection<Span> spans = rcvr.getSpans();
TraceTree traceTree = new TraceTree(spans);
-Collection<Span> roots = traceTree.getRoots();
+Collection<Span> roots = traceTree.getSpansByParent().find(Span.ROOT_SPAN_ID);
assertEquals(1, roots.size());
Span createTableRoot = roots.iterator().next();
assertEquals("creating table", createTableRoot.getDescription());
-Multimap<Long, Span> spansByParentIdMap = traceTree
-.getSpansByParentIdMap();
int createTableCount = 0;
-for (Span s : spansByParentIdMap.get(createTableRoot.getSpanId())) {
+for (Span s : traceTree.getSpansByParent().find(createTableRoot.getSpanId())) {
if (s.getDescription().startsWith("MasterService.CreateTable")) {
createTableCount++;
}
}
assertTrue(createTableCount >= 1);
-assertTrue(spansByParentIdMap.get(createTableRoot.getSpanId()).size() > 3);
+assertTrue(traceTree.getSpansByParent().find(createTableRoot.getSpanId()).size() > 3);
assertTrue(spans.size() > 5);
Put put = new Put("row".getBytes());
@@ -117,7 +117,7 @@
spans = rcvr.getSpans();
traceTree = new TraceTree(spans);
-roots = traceTree.getRoots();
+roots = traceTree.getSpansByParent().find(Span.ROOT_SPAN_ID);
assertEquals(2, roots.size());
Span putRoot = null;

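htrace 3.1 also reworks TraceTree: getRoots() and the Multimap-based getSpansByParentIdMap() are gone, replaced by a SpansByParent index whose find(spanId) returns a span's children, with roots filed under Span.ROOT_SPAN_ID. A minimal sketch of walking collected spans with the new accessor; the printing helper is assumed for illustration, not taken from the test:

```java
import java.util.Collection;
import org.apache.htrace.Span;
import org.apache.htrace.TraceTree;

public class TraceTreeSketch {
  /** Print every collected span, indented by its depth in the trace tree. */
  static void printTree(Collection<Span> spans) {
    TraceTree tree = new TraceTree(spans);
    for (Span root : tree.getSpansByParent().find(Span.ROOT_SPAN_ID)) {
      print(tree, root, 0);
    }
  }

  private static void print(TraceTree tree, Span span, int depth) {
    StringBuilder indent = new StringBuilder();
    for (int i = 0; i < depth; i++) {
      indent.append("  ");
    }
    System.out.println(indent + span.getDescription());
    // Children are indexed by their parent's span id.
    for (Span child : tree.getSpansByParent().find(span.getSpanId())) {
      print(tree, child, depth + 1);
    }
  }
}
```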

@@ -50,6 +50,7 @@ import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.LogRoller;
+import org.apache.hadoop.hbase.trace.HBaseHTraceConfiguration;
import org.apache.hadoop.hbase.trace.SpanReceiverHost;
import org.apache.hadoop.hbase.wal.WALProvider.Writer;
import org.apache.hadoop.hbase.wal.WAL;
@@ -58,10 +59,11 @@ import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
-import org.htrace.Sampler;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
-import org.htrace.impl.ProbabilitySampler;
+import org.apache.htrace.HTraceConfiguration;
+import org.apache.htrace.Sampler;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
+import org.apache.htrace.impl.ProbabilitySampler;
import com.yammer.metrics.core.Histogram;
import com.yammer.metrics.core.Meter;
@@ -150,7 +152,8 @@ public final class WALPerformanceEvaluation extends Configured implements Tool {
+ " SpanReciever can keep up.");
}
} else {
-loopSampler = new ProbabilitySampler(traceFreq);
+getConf().setDouble("hbase.sampler.fraction", traceFreq);
+loopSampler = new ProbabilitySampler(new HBaseHTraceConfiguration(getConf()));
}
}
}


@@ -210,7 +210,7 @@
</dependency>
<!-- Test Dependencies -->
<dependency>
-<groupId>org.htrace</groupId>
+<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</dependency>
</dependencies>


@@ -16,8 +16,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-HTrace = org.htrace.Trace
-java_import org.htrace.Sampler
+HTrace = org.apache.htrace.Trace
+java_import org.apache.htrace.Sampler
java_import org.apache.hadoop.hbase.trace.SpanReceiverHost
module Shell


@@ -212,7 +212,7 @@
<version>${project.version}</version>
</dependency>
<dependency>
-<groupId>org.htrace</groupId>
+<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</dependency>
<dependency>


@@ -1018,7 +1018,7 @@
<jruby.version>1.6.8</jruby.version>
<junit.version>4.11</junit.version>
<hamcrest.version>1.3</hamcrest.version>
-<htrace.version>3.0.4</htrace.version>
+<htrace.version>3.1.0-incubating</htrace.version>
<log4j.version>1.2.17</log4j.version>
<mockito-all.version>1.10.8</mockito-all.version>
<protobuf.version>2.5.0</protobuf.version>
@@ -1524,7 +1524,7 @@
<scope>test</scope>
</dependency>
<dependency>
-<groupId>org.htrace</groupId>
+<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
<version>${htrace.version}</version>
</dependency>