HBASE-6524 Hooks for hbase tracing; REAPPLICATION
git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1378809 13f79535-47bb-0310-9956-ffa450edef68
parent a2788de625
commit 8eb8b3a40a
@@ -0,0 +1,490 @@
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: Tracing.proto

package org.apache.hadoop.hbase.protobuf.generated;

public final class Tracing {
  private Tracing() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  public interface RPCTInfoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional int64 traceId = 1;
    boolean hasTraceId();
    long getTraceId();

    // optional int64 parentId = 2;
    boolean hasParentId();
    long getParentId();
  }
  public static final class RPCTInfo extends
      com.google.protobuf.GeneratedMessage
      implements RPCTInfoOrBuilder {
    // Use RPCTInfo.newBuilder() to construct.
    private RPCTInfo(Builder builder) {
      super(builder);
    }
    private RPCTInfo(boolean noInit) {}

    private static final RPCTInfo defaultInstance;
    public static RPCTInfo getDefaultInstance() {
      return defaultInstance;
    }

    public RPCTInfo getDefaultInstanceForType() {
      return defaultInstance;
    }

    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_fieldAccessorTable;
    }

    private int bitField0_;
    // optional int64 traceId = 1;
    public static final int TRACEID_FIELD_NUMBER = 1;
    private long traceId_;
    public boolean hasTraceId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public long getTraceId() {
      return traceId_;
    }

    // optional int64 parentId = 2;
    public static final int PARENTID_FIELD_NUMBER = 2;
    private long parentId_;
    public boolean hasParentId() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    public long getParentId() {
      return parentId_;
    }

    private void initFields() {
      traceId_ = 0L;
      parentId_ = 0L;
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeInt64(1, traceId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeInt64(2, parentId_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt64Size(1, traceId_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt64Size(2, parentId_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo other = (org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo) obj;

      boolean result = true;
      result = result && (hasTraceId() == other.hasTraceId());
      if (hasTraceId()) {
        result = result && (getTraceId()
            == other.getTraceId());
      }
      result = result && (hasParentId() == other.hasParentId());
      if (hasParentId()) {
        result = result && (getParentId()
            == other.getParentId());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    @java.lang.Override
    public int hashCode() {
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasTraceId()) {
        hash = (37 * hash) + TRACEID_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getTraceId());
      }
      if (hasParentId()) {
        hash = (37 * hash) + PARENTID_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getParentId());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      return hash;
    }

    public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data).buildParsed();
    }
    public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return newBuilder().mergeFrom(data, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }
    public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      Builder builder = newBuilder();
      if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
        return builder.buildParsed();
      } else {
        return null;
      }
    }
    public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input).buildParsed();
    }
    public static org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return newBuilder().mergeFrom(input, extensionRegistry)
               .buildParsed();
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
        implements org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfoOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.Tracing.internal_static_RPCTInfo_fieldAccessorTable;
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        traceId_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000001);
        parentId_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDescriptor();
      }

      public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo build() {
        org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      private org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo result = new org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.traceId_ = traceId_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.parentId_ = parentId_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.getDefaultInstance()) return this;
        if (other.hasTraceId()) {
          setTraceId(other.getTraceId());
        }
        if (other.hasParentId()) {
          setParentId(other.getParentId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              traceId_ = input.readInt64();
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              parentId_ = input.readInt64();
              break;
            }
          }
        }
      }

      private int bitField0_;

      // optional int64 traceId = 1;
      private long traceId_ ;
      public boolean hasTraceId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public long getTraceId() {
        return traceId_;
      }
      public Builder setTraceId(long value) {
        bitField0_ |= 0x00000001;
        traceId_ = value;
        onChanged();
        return this;
      }
      public Builder clearTraceId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        traceId_ = 0L;
        onChanged();
        return this;
      }

      // optional int64 parentId = 2;
      private long parentId_ ;
      public boolean hasParentId() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      public long getParentId() {
        return parentId_;
      }
      public Builder setParentId(long value) {
        bitField0_ |= 0x00000002;
        parentId_ = value;
        onChanged();
        return this;
      }
      public Builder clearParentId() {
        bitField0_ = (bitField0_ & ~0x00000002);
        parentId_ = 0L;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:RPCTInfo)
    }

    static {
      defaultInstance = new RPCTInfo(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:RPCTInfo)
  }

  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_RPCTInfo_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_RPCTInfo_fieldAccessorTable;

  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    java.lang.String[] descriptorData = {
      "\n\rTracing.proto\"-\n\010RPCTInfo\022\017\n\007traceId\030\001" +
      " \001(\003\022\020\n\010parentId\030\002 \001(\003B:\n*org.apache.had" +
      "oop.hbase.protobuf.generatedB\007TracingH\001\240" +
      "\001\001"
    };
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          internal_static_RPCTInfo_descriptor =
            getDescriptor().getMessageTypes().get(0);
          internal_static_RPCTInfo_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_RPCTInfo_descriptor,
              new java.lang.String[] { "TraceId", "ParentId", },
              org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.class,
              org.apache.hadoop.hbase.protobuf.generated.Tracing.RPCTInfo.Builder.class);
          return null;
        }
      };
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
        }, assigner);
  }

  // @@protoc_insertion_point(outer_class_scope)
}
@@ -0,0 +1,89 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.trace;
import java.io.IOException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.cloudera.htrace.Span;
import org.cloudera.htrace.SpanReceiver;
import org.cloudera.htrace.Trace;
import org.cloudera.htrace.impl.LocalFileSpanReceiver;

/**
 * Wraps the LocalFileSpanReceiver provided in
 * org.cloudera.htrace.impl.LocalFileSpanReceiver to read the file name
 * destination for spans from hbase-site.xml.
 *
 * The file path should be added as a property with name
 * "hbase.trace.spanreceiver.localfilespanreceiver.filename".
 */
public class HBaseLocalFileSpanReceiver implements SpanReceiver, Configurable {
  public static final Log LOG = LogFactory
      .getLog(HBaseLocalFileSpanReceiver.class);
  public static final String FILE_NAME_CONF_KEY = "hbase.trace.spanreceiver.localfilespanreceiver.filename";
  private Configuration conf;
  private LocalFileSpanReceiver rcvr;

  @Override
  public Configuration getConf() {
    return conf;
  }

  @Override
  public void setConf(Configuration arg0) {
    this.conf = arg0;
    // replace rcvr if it was already created
    if (rcvr != null) {
      try {
        rcvr.close();
      } catch (IOException e) {
        LOG.warn("Error closing LocalFileSpanReceiver.", e);
      }
    }
    try {
      rcvr = new LocalFileSpanReceiver(conf.get(FILE_NAME_CONF_KEY));
    } catch (IOException e) {
      Trace.removeReceiver(this);
      rcvr = null;
      LOG.warn(
          "Unable to initialize LocalFileSpanReceiver, removing owner (HBaseLocalFileSpanReceiver) from receiver list.",
          e);
    }
  }

  @Override
  public void close() throws IOException {
    try {
      if (rcvr != null) {
        rcvr.close();
      }
    } finally {
      rcvr = null;
    }
  }

  @Override
  public void receiveSpan(Span span) {
    if (rcvr != null) {
      rcvr.receiveSpan(span);
    }
  }
}
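
As the javadoc above notes, this receiver pulls its destination file from configuration. A minimal sketch of exercising it directly (not part of this commit; the file path is a placeholder, and HBaseConfiguration is the standard HBase config factory):

// Sketch only: configuring and registering the receiver by hand.
Configuration conf = HBaseConfiguration.create();
conf.set(HBaseLocalFileSpanReceiver.FILE_NAME_CONF_KEY, "/tmp/htrace-spans");
HBaseLocalFileSpanReceiver receiver = new HBaseLocalFileSpanReceiver();
receiver.setConf(conf);       // opens a LocalFileSpanReceiver on the configured path
Trace.addReceiver(receiver);  // spans now flow to the local file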

@@ -0,0 +1,113 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.trace;

import java.io.IOException;
import java.util.Collection;
import java.util.HashSet;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ReflectionUtils;
import org.cloudera.htrace.SpanReceiver;
import org.cloudera.htrace.Trace;

/**
 * This class provides functions for reading the names of SpanReceivers from
 * hbase-site.xml, adding those SpanReceivers to the Tracer, and closing those
 * SpanReceivers when appropriate.
 */
public class SpanReceiverHost {
  public static final String SPAN_RECEIVERS_CONF_KEY = "hbase.trace.spanreceiver.classes";
  private static final Log LOG = LogFactory.getLog(SpanReceiverHost.class);
  private Collection<SpanReceiver> receivers;
  private Configuration conf;

  public SpanReceiverHost(Configuration conf) {
    receivers = new HashSet<SpanReceiver>();
    this.conf = conf;
  }

  /**
   * Reads the names of classes specified in the
   * "hbase.trace.spanreceiver.classes" property and instantiates and registers
   * them with the Tracer as SpanReceiver's.
   *
   * The nullary constructor is called during construction, but if the classes
   * specified implement the Configurable interface, setConfiguration() will be
   * called on them. This allows SpanReceivers to use values from
   * hbase-site.xml. See
   * {@link org.apache.hadoop.hbase.trace.HBaseLocalFileSpanReceiver} for an
   * example.
   */
  public void loadSpanReceivers() {
    Class<?> implClass = null;
    String[] receiverNames = conf.getStrings(SPAN_RECEIVERS_CONF_KEY);
    if (receiverNames == null || receiverNames.length == 0) {
      return;
    }
    for (String className : receiverNames) {
      className = className.trim();

      try {
        implClass = Class.forName(className);
        receivers.add(loadInstance(implClass));
        LOG.info("SpanReceiver " + className + " was loaded successfully.");
      } catch (ClassNotFoundException e) {
        LOG.warn("Class " + className + " cannot be found. " + e.getMessage());
      } catch (IOException e) {
        LOG.warn("Load SpanReceiver " + className + " failed. "
            + e.getMessage());
      }
    }
    for (SpanReceiver rcvr : receivers) {
      Trace.addReceiver(rcvr);
    }
  }

  private SpanReceiver loadInstance(Class<?> implClass)
      throws IOException {
    SpanReceiver impl;
    try {
      Object o = ReflectionUtils.newInstance(implClass, conf);
      impl = (SpanReceiver)o;
    } catch (SecurityException e) {
      throw new IOException(e);
    } catch (IllegalArgumentException e) {
      throw new IOException(e);
    } catch (RuntimeException e) {
      throw new IOException(e);
    }

    return impl;
  }

  /**
   * Calls close() on all SpanReceivers created by this SpanReceiverHost.
   */
  public void closeReceivers() {
    for (SpanReceiver rcvr : receivers) {
      try {
        rcvr.close();
      } catch (IOException e) {
        LOG.warn("Unable to close SpanReceiver correctly: " + e.getMessage(), e);
      }
    }
  }
}
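
For context, a minimal sketch of the lifecycle this class implements, using only the conf key and methods defined above (the receiver class name is the one from this commit; HBaseConfiguration is the standard HBase config factory):

// Sketch only: loading receivers by class name, as a server would at startup.
Configuration conf = HBaseConfiguration.create();
conf.set(SpanReceiverHost.SPAN_RECEIVERS_CONF_KEY,
    "org.apache.hadoop.hbase.trace.HBaseLocalFileSpanReceiver");
SpanReceiverHost host = new SpanReceiverHost(conf);
host.loadSpanReceivers();   // reflects, configures, and registers each receiver
// ... traced work happens here ...
host.closeReceivers();      // closes every receiver, logging any IOException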

@@ -0,0 +1,31 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
option java_package = "org.apache.hadoop.hbase.protobuf.generated";
option java_outer_classname = "Tracing";
option java_generate_equals_and_hash = true;
option optimize_for = SPEED;

// Used to pass through the information necessary to continue
// a trace after an RPC is made. All we need is the traceid
// (so we know the overarching trace this message is a part of), and
// the id of the current span when this message was sent, so we know
// what span caused the new span we will create when this message is received.
message RPCTInfo {
  optional int64 traceId = 1;
  optional int64 parentId = 2;
}
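
A quick sketch of what this message looks like from the Java side, using only the generated API added earlier in this commit (the literal ids are arbitrary):

// Sketch: building and round-tripping an RPCTInfo with the generated classes.
Tracing.RPCTInfo info = Tracing.RPCTInfo.newBuilder()
    .setTraceId(100L)    // id of the overarching trace (arbitrary here)
    .setParentId(200L)   // id of the span current when the RPC was sent
    .build();
byte[] wire = info.toByteArray();              // two optional int64 fields on the wire
Tracing.RPCTInfo decoded = Tracing.RPCTInfo.parseFrom(wire);
assert decoded.hasTraceId() && decoded.getTraceId() == 100L;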

@@ -0,0 +1,119 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.trace;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import java.util.Collection;

import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.cloudera.htrace.Sampler;
import org.cloudera.htrace.Span;
import org.cloudera.htrace.Trace;
import org.cloudera.htrace.TraceTree;
import org.cloudera.htrace.impl.POJOSpanReceiver;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import com.google.common.collect.Multimap;

@Category(MediumTests.class)
public class TestHTraceHooks {

  private static final byte[] FAMILY_BYTES = "family".getBytes();
  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  private static final POJOSpanReceiver rcvr = new POJOSpanReceiver();

  @BeforeClass
  public static void before() throws Exception {
    TEST_UTIL.startMiniCluster(2, 3);
    Trace.addReceiver(rcvr);
  }

  @AfterClass
  public static void after() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
    Trace.removeReceiver(rcvr);
  }

  @Test
  public void testTraceCreateTable() throws Exception {
    Span tableCreationSpan = Trace.startSpan("creating table", Sampler.ALWAYS);
    HTable table;
    try {
      table = TEST_UTIL.createTable("table".getBytes(),
        FAMILY_BYTES);
    } finally {
      tableCreationSpan.stop();
    }

    Collection<Span> spans = rcvr.getSpans();
    TraceTree traceTree = new TraceTree(spans);
    Collection<Span> roots = traceTree.getRoots();

    assertEquals(1, roots.size());
    Span createTableRoot = roots.iterator().next();

    assertEquals("creating table", createTableRoot.getDescription());
    Multimap<Long, Span> spansByParentIdMap = traceTree
        .getSpansByParentIdMap();

    int startsWithHandlingCount = 0;

    for (Span s : spansByParentIdMap.get(createTableRoot.getSpanId())) {
      if (s.getDescription().startsWith("handling")) {
        startsWithHandlingCount++;
      }
    }

    assertTrue(startsWithHandlingCount > 3);
    assertTrue(spansByParentIdMap.get(createTableRoot.getSpanId()).size() > 3);
    assertTrue(spans.size() > 5);

    Put put = new Put("row".getBytes());
    put.add(FAMILY_BYTES, "col".getBytes(), "value".getBytes());

    Span putSpan = Trace.startSpan("doing put", Sampler.ALWAYS);
    try {
      table.put(put);
    } finally {
      putSpan.stop();
    }

    spans = rcvr.getSpans();
    traceTree = new TraceTree(spans);
    roots = traceTree.getRoots();

    assertEquals(2, roots.size());
    Span putRoot = null;
    for (Span root : roots) {
      if (root.getDescription().equals("doing put")) {
        putRoot = root;
      }
    }

    assertNotNull(putRoot);
  }
}