HBASE-12810 Update to htrace-incubating

Nick Dimiduk 2015-01-21 13:02:24 -08:00
parent 508085fe5a
commit 9824eb7840
30 changed files with 85 additions and 110 deletions

View File

@@ -152,7 +152,7 @@
       <artifactId>zookeeper</artifactId>
     </dependency>
     <dependency>
-      <groupId>org.htrace</groupId>
+      <groupId>org.apache.htrace</groupId>
       <artifactId>htrace-core</artifactId>
     </dependency>
     <dependency>

View File

@@ -54,7 +54,7 @@ import org.apache.hadoop.hbase.client.coprocessor.Batch;
 import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
-import org.htrace.Trace;
+import org.apache.htrace.Trace;
 import com.google.common.annotations.VisibleForTesting;

View File

@@ -25,7 +25,7 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.htrace.Trace;
+import org.apache.htrace.Trace;
 /**
  * A completion service for the RpcRetryingCallerFactory.
@@ -162,4 +162,4 @@ public class ResultBoundedCompletionService<V> {
       if (future != null) future.cancel(true);
     }
   }
-}
+}

View File

@@ -60,9 +60,9 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.security.token.TokenSelector;
-import org.htrace.Span;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
+import org.apache.htrace.Span;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
 import javax.net.SocketFactory;
 import javax.security.sasl.SaslException;

View File

@@ -45,8 +45,8 @@ import org.apache.zookeeper.data.ACL;
 import org.apache.zookeeper.data.Stat;
 import org.apache.zookeeper.proto.CreateRequest;
 import org.apache.zookeeper.proto.SetDataRequest;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
 /**
  * A zookeeper that can handle 'recoverable' errors.

View File

@@ -249,7 +249,7 @@
     </dependency>
     <!-- tracing Dependencies -->
     <dependency>
-      <groupId>org.htrace</groupId>
+      <groupId>org.apache.htrace</groupId>
       <artifactId>htrace-core</artifactId>
     </dependency>
   </dependencies>

View File

@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.trace;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.htrace.HTraceConfiguration;
+import org.apache.htrace.HTraceConfiguration;
 @InterfaceAudience.Private
 public class HBaseHTraceConfiguration extends HTraceConfiguration {

View File

@@ -25,8 +25,9 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.htrace.SpanReceiver;
-import org.htrace.Trace;
+import org.apache.htrace.SpanReceiver;
+import org.apache.htrace.SpanReceiverBuilder;
+import org.apache.htrace.Trace;
 /**
  * This class provides functions for reading the names of SpanReceivers from
@@ -67,33 +68,24 @@ public class SpanReceiverHost {
   }
   /**
-   * Reads the names of classes specified in the
-   * "hbase.trace.spanreceiver.classes" property and instantiates and registers
-   * them with the Tracer as SpanReceiver's.
+   * Reads the names of classes specified in the {@code hbase.trace.spanreceiver.classes} property
+   * and instantiates and registers them with the Tracer.
    *
    */
   public void loadSpanReceivers() {
-    Class<?> implClass = null;
     String[] receiverNames = conf.getStrings(SPAN_RECEIVERS_CONF_KEY);
     if (receiverNames == null || receiverNames.length == 0) {
       return;
     }
+    SpanReceiverBuilder builder = new SpanReceiverBuilder(new HBaseHTraceConfiguration(conf));
    for (String className : receiverNames) {
      className = className.trim();
-      try {
-        implClass = Class.forName(className);
-        SpanReceiver receiver = loadInstance(implClass);
-        if (receiver != null) {
-          receivers.add(receiver);
-          LOG.info("SpanReceiver " + className + " was loaded successfully.");
-        }
-      } catch (ClassNotFoundException e) {
-        LOG.warn("Class " + className + " cannot be found. " + e.getMessage());
-      } catch (IOException e) {
-        LOG.warn("Load SpanReceiver " + className + " failed. "
-            + e.getMessage());
+      SpanReceiver receiver = builder.spanReceiverClass(className).build();
+      if (receiver != null) {
+        receivers.add(receiver);
+        LOG.info("SpanReceiver " + className + " was loaded successfully.");
      }
    }
    for (SpanReceiver rcvr : receivers) {
@@ -101,28 +93,6 @@ public class SpanReceiverHost {
     }
   }
-  private SpanReceiver loadInstance(Class<?> implClass)
-      throws IOException {
-    SpanReceiver impl = null;
-    try {
-      Object o = implClass.newInstance();
-      impl = (SpanReceiver)o;
-      impl.configure(new HBaseHTraceConfiguration(this.conf));
-    } catch (SecurityException e) {
-      throw new IOException(e);
-    } catch (IllegalArgumentException e) {
-      throw new IOException(e);
-    } catch (RuntimeException e) {
-      throw new IOException(e);
-    } catch (InstantiationException e) {
-      e.printStackTrace();
-    } catch (IllegalAccessException e) {
-      e.printStackTrace();
-    }
-    return impl;
-  }
   /**
    * Calls close() on all SpanReceivers created by this SpanReceiverHost.
    */
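
[Editor's sketch] A minimal standalone illustration of the SpanReceiverBuilder loading path this hunk adopts. The choice of LocalFileSpanReceiver and the "hbase.local-file-span-receiver.path" key are illustrative assumptions, not part of this commit; the "hbase." key prefix is inferred from the "hbase.sampler.fraction" usage elsewhere in this change.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.trace.HBaseHTraceConfiguration;
import org.apache.htrace.SpanReceiver;
import org.apache.htrace.SpanReceiverBuilder;
import org.apache.htrace.Trace;

public class SpanReceiverLoadSketch {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // Hypothetical receiver choice, for illustration only.
    conf.set("hbase.trace.spanreceiver.classes",
        "org.apache.htrace.impl.LocalFileSpanReceiver");
    // Assumed key; HBaseHTraceConfiguration prepends "hbase." to htrace keys.
    conf.set("hbase.local-file-span-receiver.path", "/tmp/htrace-spans.json");
    SpanReceiverBuilder builder =
        new SpanReceiverBuilder(new HBaseHTraceConfiguration(conf));
    for (String className : conf.getStrings("hbase.trace.spanreceiver.classes")) {
      // build() returns null on failure instead of throwing, which is what
      // lets the rewritten loadSpanReceivers() above drop its try/catch blocks.
      SpanReceiver receiver = builder.spanReceiverClass(className.trim()).build();
      if (receiver != null) {
        Trace.addReceiver(receiver);
      }
    }
  }
}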

View File

@@ -221,7 +221,7 @@
       <artifactId>commons-lang</artifactId>
     </dependency>
     <dependency>
-      <groupId>org.htrace</groupId>
+      <groupId>org.apache.htrace</groupId>
       <artifactId>htrace-core</artifactId>
     </dependency>
     <dependency>

View File

@@ -65,10 +65,10 @@ import org.apache.hadoop.hbase.regionserver.NoSuchColumnFamilyException;
 import org.apache.hadoop.hbase.security.AccessDeniedException;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.LoadTestTool;
-import org.htrace.Span;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
-import org.htrace.impl.AlwaysSampler;
+import org.apache.htrace.Span;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
+import org.apache.htrace.impl.AlwaysSampler;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;

View File

@@ -35,9 +35,9 @@ import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.util.ToolRunner;
-import org.htrace.Sampler;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
+import org.apache.htrace.Sampler;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;

View File

@@ -466,7 +466,7 @@
     </dependency>
     <!-- tracing Dependencies -->
     <dependency>
-      <groupId>org.htrace</groupId>
+      <groupId>org.apache.htrace</groupId>
       <artifactId>htrace-core</artifactId>
     </dependency>
     <dependency>

View File

@@ -25,9 +25,9 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.Server;
-import org.htrace.Span;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
+import org.apache.htrace.Span;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
 /**
  * Abstract base class for all HBase event handlers. Subclasses should

View File

@@ -43,8 +43,8 @@ import org.apache.hadoop.hbase.util.ByteBufferUtils;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.IdLock;
 import org.apache.hadoop.io.WritableUtils;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
 import com.google.common.annotations.VisibleForTesting;

View File

@@ -27,8 +27,8 @@ import org.apache.hadoop.hbase.security.UserProvider;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.StringUtils;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
 import com.google.protobuf.Message;

View File

@@ -26,7 +26,7 @@ import org.apache.hadoop.hbase.security.User;
 import com.google.protobuf.BlockingService;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.htrace.Trace;
+import org.apache.htrace.Trace;
 import java.net.InetAddress;

View File

@@ -119,7 +119,7 @@ import org.apache.hadoop.security.token.SecretManager.InvalidToken;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.util.StringUtils;
 import org.codehaus.jackson.map.ObjectMapper;
-import org.htrace.TraceInfo;
+import org.apache.htrace.TraceInfo;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 import com.google.protobuf.BlockingService;

View File

@@ -729,7 +729,7 @@ public class TableMapReduceUtil {
       io.netty.channel.Channel.class,
       com.google.protobuf.Message.class,
       com.google.common.collect.Lists.class,
-      org.htrace.Trace.class);
+      org.apache.htrace.Trace.class);
   }
   /**

View File

@@ -43,7 +43,7 @@ import org.apache.hadoop.hbase.master.RegionState;
 import org.apache.hadoop.hbase.master.RegionStates;
 import org.apache.hadoop.hbase.master.TableLockManager;
 import org.apache.hadoop.hbase.master.TableLockManager.TableLock;
-import org.htrace.Trace;
+import org.apache.htrace.Trace;
 /**
  * Handler to run disable of a table.

View File

@@ -51,8 +51,8 @@ import org.apache.hadoop.hbase.util.HasThread;
 import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
 import com.google.common.base.Preconditions;

View File

@@ -84,10 +84,10 @@ import org.apache.hadoop.hbase.wal.WALProvider.Writer;
 import org.apache.hadoop.hbase.wal.WALSplitter;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.util.StringUtils;
-import org.htrace.NullScope;
-import org.htrace.Span;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
+import org.apache.htrace.NullScope;
+import org.apache.htrace.Span;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.Maps;

View File

@@ -19,7 +19,7 @@
 package org.apache.hadoop.hbase.regionserver.wal;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.htrace.Span;
+import org.apache.htrace.Span;
 import com.lmax.disruptor.EventFactory;

View File

@@ -21,7 +21,7 @@ import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.htrace.Span;
+import org.apache.htrace.Span;
 /**
  * A Future on a filesystem sync call. It given to a client or 'Handler' for it to wait on till

View File

@@ -74,6 +74,7 @@ import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.RandomDistribution;
 import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
 import org.apache.hadoop.hbase.regionserver.BloomType;
+import org.apache.hadoop.hbase.trace.HBaseHTraceConfiguration;
 import org.apache.hadoop.hbase.trace.SpanReceiverHost;
 import org.apache.hadoop.hbase.util.*;
 import org.apache.hadoop.io.LongWritable;
@@ -86,10 +87,10 @@ import org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 import org.codehaus.jackson.map.ObjectMapper;
-import org.htrace.Sampler;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
-import org.htrace.impl.ProbabilitySampler;
+import org.apache.htrace.Sampler;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
+import org.apache.htrace.impl.ProbabilitySampler;
 import com.google.common.base.Objects;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
@@ -952,7 +953,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
    */
   Test(final Connection con, final TestOptions options, final Status status) {
     this.connection = con;
-    this.conf = con == null? null: this.connection.getConfiguration();
+    this.conf = con == null ? HBaseConfiguration.create() : this.connection.getConfiguration();
     this.receiverHost = this.conf == null? null: SpanReceiverHost.getInstance(conf);
     this.opts = options;
     this.status = status;
@@ -960,7 +961,8 @@ public class PerformanceEvaluation extends Configured implements Tool {
     if (options.traceRate >= 1.0) {
       this.traceSampler = Sampler.ALWAYS;
     } else if (options.traceRate > 0.0) {
-      this.traceSampler = new ProbabilitySampler(options.traceRate);
+      conf.setDouble("hbase.sampler.fraction", options.traceRate);
+      this.traceSampler = new ProbabilitySampler(new HBaseHTraceConfiguration(conf));
     } else {
       this.traceSampler = Sampler.NEVER;
     }
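
[Editor's sketch] A hedged illustration of the sampler construction pattern this hunk switches to: htrace 3.1 samplers read their rate from configuration rather than taking a bare double. The 0.01 rate and the span name are arbitrary examples, not from this commit.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.trace.HBaseHTraceConfiguration;
import org.apache.htrace.Trace;
import org.apache.htrace.TraceScope;
import org.apache.htrace.impl.ProbabilitySampler;

public class SamplerSketch {
  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // HBaseHTraceConfiguration strips the "hbase." prefix, so this surfaces
    // to htrace as "sampler.fraction"; the old double-arg constructor is gone.
    conf.setDouble("hbase.sampler.fraction", 0.01); // sample roughly 1% of spans
    ProbabilitySampler sampler =
        new ProbabilitySampler(new HBaseHTraceConfiguration(conf));
    TraceScope scope = Trace.startSpan("demo operation", sampler);
    try {
      // traced work would go here
    } finally {
      scope.close();
    }
  }
}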

View File

@@ -30,12 +30,12 @@ import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
-import org.htrace.Sampler;
-import org.htrace.Span;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
-import org.htrace.TraceTree;
-import org.htrace.impl.POJOSpanReceiver;
+import org.apache.htrace.Sampler;
+import org.apache.htrace.Span;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
+import org.apache.htrace.TraceTree;
+import org.apache.htrace.impl.POJOSpanReceiver;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -48,11 +48,12 @@ public class TestHTraceHooks {
   private static final byte[] FAMILY_BYTES = "family".getBytes();
   private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
-  private static final POJOSpanReceiver rcvr = new POJOSpanReceiver();
+  private static POJOSpanReceiver rcvr;
   @BeforeClass
   public static void before() throws Exception {
     TEST_UTIL.startMiniCluster(2, 3);
+    rcvr = new POJOSpanReceiver(new HBaseHTraceConfiguration(TEST_UTIL.getConfiguration()));
     Trace.addReceiver(rcvr);
   }
@@ -60,6 +61,7 @@ public class TestHTraceHooks {
   public static void after() throws Exception {
     TEST_UTIL.shutdownMiniCluster();
     Trace.removeReceiver(rcvr);
+    rcvr = null;
   }
   @Test
@@ -85,25 +87,23 @@ public class TestHTraceHooks {
     Collection<Span> spans = rcvr.getSpans();
     TraceTree traceTree = new TraceTree(spans);
-    Collection<Span> roots = traceTree.getRoots();
+    Collection<Span> roots = traceTree.getSpansByParent().find(Span.ROOT_SPAN_ID);
     assertEquals(1, roots.size());
     Span createTableRoot = roots.iterator().next();
     assertEquals("creating table", createTableRoot.getDescription());
-    Multimap<Long, Span> spansByParentIdMap = traceTree
-        .getSpansByParentIdMap();
     int createTableCount = 0;
-    for (Span s : spansByParentIdMap.get(createTableRoot.getSpanId())) {
+    for (Span s : traceTree.getSpansByParent().find(createTableRoot.getSpanId())) {
       if (s.getDescription().startsWith("MasterService.CreateTable")) {
         createTableCount++;
       }
     }
     assertTrue(createTableCount >= 1);
-    assertTrue(spansByParentIdMap.get(createTableRoot.getSpanId()).size() > 3);
+    assertTrue(traceTree.getSpansByParent().find(createTableRoot.getSpanId()).size() > 3);
     assertTrue(spans.size() > 5);
     Put put = new Put("row".getBytes());
@@ -118,7 +118,7 @@ public class TestHTraceHooks {
     spans = rcvr.getSpans();
     traceTree = new TraceTree(spans);
-    roots = traceTree.getRoots();
+    roots = traceTree.getSpansByParent().find(Span.ROOT_SPAN_ID);
     assertEquals(2, roots.size());
     Span putRoot = null;
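
[Editor's sketch] A small illustration of the TraceTree traversal API the test moves to: getRoots() and getSpansByParentIdMap() give way to getSpansByParent().find(...). The printing is illustrative only; the calls mirror the assertions above.

import java.util.Collection;
import org.apache.htrace.Span;
import org.apache.htrace.TraceTree;

public class TraceTreeSketch {
  // Root spans hang off the synthetic Span.ROOT_SPAN_ID parent; children are
  // looked up by their parent's span id.
  static void printTwoLevels(Collection<Span> spans) {
    TraceTree tree = new TraceTree(spans);
    for (Span root : tree.getSpansByParent().find(Span.ROOT_SPAN_ID)) {
      System.out.println(root.getDescription());
      for (Span child : tree.getSpansByParent().find(root.getSpanId())) {
        System.out.println("  " + child.getDescription());
      }
    }
  }
}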

View File

@@ -50,6 +50,7 @@ import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.io.crypto.KeyProviderForTesting;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.LogRoller;
+import org.apache.hadoop.hbase.trace.HBaseHTraceConfiguration;
 import org.apache.hadoop.hbase.trace.SpanReceiverHost;
 import org.apache.hadoop.hbase.wal.WALProvider.Writer;
 import org.apache.hadoop.hbase.wal.WAL;
@@ -58,10 +59,11 @@ import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
-import org.htrace.Sampler;
-import org.htrace.Trace;
-import org.htrace.TraceScope;
-import org.htrace.impl.ProbabilitySampler;
+import org.apache.htrace.HTraceConfiguration;
+import org.apache.htrace.Sampler;
+import org.apache.htrace.Trace;
+import org.apache.htrace.TraceScope;
+import org.apache.htrace.impl.ProbabilitySampler;
 import com.yammer.metrics.core.Histogram;
 import com.yammer.metrics.core.Meter;
@@ -150,7 +152,8 @@ public final class WALPerformanceEvaluation extends Configured implements Tool {
             + " SpanReciever can keep up.");
       }
     } else {
-      loopSampler = new ProbabilitySampler(traceFreq);
+      getConf().setDouble("hbase.sampler.fraction", traceFreq);
+      loopSampler = new ProbabilitySampler(new HBaseHTraceConfiguration(getConf()));
     }
   }
 }

View File

@@ -231,7 +231,7 @@
     </dependency>
     <!-- Test Dependencies -->
     <dependency>
-      <groupId>org.htrace</groupId>
+      <groupId>org.apache.htrace</groupId>
       <artifactId>htrace-core</artifactId>
     </dependency>
   </dependencies>

View File

@@ -16,8 +16,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-HTrace = org.htrace.Trace
-java_import org.htrace.Sampler
+HTrace = org.apache.htrace.Trace
+java_import org.apache.htrace.Sampler
 java_import org.apache.hadoop.hbase.trace.SpanReceiverHost
 module Shell

View File

@@ -241,7 +241,7 @@
       <version>${project.version}</version>
     </dependency>
     <dependency>
-      <groupId>org.htrace</groupId>
+      <groupId>org.apache.htrace</groupId>
       <artifactId>htrace-core</artifactId>
     </dependency>
     <dependency>

View File

@@ -1036,7 +1036,7 @@
     <jruby.version>1.6.8</jruby.version>
     <junit.version>4.11</junit.version>
     <hamcrest.version>1.3</hamcrest.version>
-    <htrace.version>3.0.4</htrace.version>
+    <htrace.version>3.1.0-incubating</htrace.version>
     <log4j.version>1.2.17</log4j.version>
     <mockito-all.version>1.10.8</mockito-all.version>
     <protobuf.version>2.5.0</protobuf.version>
@@ -1530,7 +1530,7 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.htrace</groupId>
+      <groupId>org.apache.htrace</groupId>
       <artifactId>htrace-core</artifactId>
       <version>${htrace.version}</version>
     </dependency>