HBASE-26471 Move tracing semantic attributes to their own class (#3896)
Signed-off-by: Duo Zhang <zhangduo@apache.org>
Signed-off-by: Tak Lon (Stephen) Wu <taklwu@apache.org>
parent 061ccffb9e
commit d1c68a2d5c
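For orientation, a hedged sketch of the call-site change this commit makes throughout the codebase: the attribute-key constants move from TraceUtil to HBaseSemanticAttributes and are usually pulled in via static import. The class and method below are illustrative only and not part of the diff:

import static org.apache.hadoop.hbase.trace.HBaseSemanticAttributes.SERVER_NAME_KEY;

import io.opentelemetry.api.trace.Span;

// Hypothetical example class; only the attribute-key usage pattern reflects this commit.
public final class ServerNameAttributeSketch {
  static void recordServerName(String serverName) {
    // Before this commit: Span.current().setAttribute(TraceUtil.SERVER_NAME_KEY, serverName);
    Span.current().setAttribute(SERVER_NAME_KEY, serverName);
  }

  private ServerNameAttributeSketch() { }
}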
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements. See the NOTICE file
  * distributed with this work for additional information
@@ -25,6 +25,7 @@ import static org.apache.hadoop.hbase.client.ConnectionUtils.NO_NONCE_GENERATOR;
 import static org.apache.hadoop.hbase.client.ConnectionUtils.getStubKey;
 import static org.apache.hadoop.hbase.client.MetricsConnection.CLIENT_SIDE_METRICS_ENABLED_KEY;
 import static org.apache.hadoop.hbase.client.NonceGenerator.CLIENT_NONCES_ENABLED_KEY;
+import static org.apache.hadoop.hbase.trace.HBaseSemanticAttributes.SERVER_NAME_KEY;
 import static org.apache.hadoop.hbase.util.FutureUtils.addListener;
 
 import io.opentelemetry.api.trace.Span;
@@ -371,7 +372,7 @@ class AsyncConnectionImpl implements AsyncConnection {
   }
 
   private Hbck getHbckInternal(ServerName masterServer) {
-    Span.current().setAttribute(TraceUtil.SERVER_NAME_KEY, masterServer.getServerName());
+    Span.current().setAttribute(SERVER_NAME_KEY, masterServer.getServerName());
     // we will not create a new connection when creating a new protobuf stub, and for hbck there
     // will be no performance consideration, so for simplification we will create a new stub every
     // time instead of caching the stub here.
@@ -18,8 +18,8 @@
 package org.apache.hadoop.hbase.client;
 
 import static org.apache.hadoop.hbase.TableName.META_TABLE_NAME;
-import static org.apache.hadoop.hbase.trace.TraceUtil.REGION_NAMES_KEY;
-import static org.apache.hadoop.hbase.trace.TraceUtil.SERVER_NAME_KEY;
+import static org.apache.hadoop.hbase.trace.HBaseSemanticAttributes.REGION_NAMES_KEY;
+import static org.apache.hadoop.hbase.trace.HBaseSemanticAttributes.SERVER_NAME_KEY;
 import static org.apache.hadoop.hbase.trace.TraceUtil.createSpan;
 import static org.apache.hadoop.hbase.trace.TraceUtil.createTableSpan;
 import static org.apache.hadoop.hbase.util.FutureUtils.addListener;
@@ -78,6 +78,7 @@ import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
 import org.apache.hadoop.hbase.log.HBaseMarkers;
 import org.apache.hadoop.hbase.regionserver.RegionServerStoppedException;
 import org.apache.hadoop.hbase.security.User;
+import org.apache.hadoop.hbase.trace.HBaseSemanticAttributes;
 import org.apache.hadoop.hbase.trace.TraceUtil;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@@ -504,7 +505,7 @@ class ConnectionImplementation implements ClusterConnection, Closeable {
         return MasterProtos.HbckService.newBlockingStub(channel);
       }), rpcControllerFactory);
     }, () -> TraceUtil.createSpan(this.getClass().getSimpleName() + ".getHbck")
-      .setAttribute(TraceUtil.SERVER_NAME_KEY, masterServer.getServerName()));
+      .setAttribute(HBaseSemanticAttributes.SERVER_NAME_KEY, masterServer.getServerName()));
   }
 
   @Override
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements. See the NOTICE file
  * distributed with this work for additional information
@@ -20,6 +20,10 @@ package org.apache.hadoop.hbase.ipc;
 
 import static org.apache.hadoop.hbase.ipc.IPCUtil.toIOE;
 import static org.apache.hadoop.hbase.ipc.IPCUtil.wrapException;
+import static org.apache.hadoop.hbase.trace.HBaseSemanticAttributes.REMOTE_HOST_KEY;
+import static org.apache.hadoop.hbase.trace.HBaseSemanticAttributes.REMOTE_PORT_KEY;
+import static org.apache.hadoop.hbase.trace.HBaseSemanticAttributes.RPC_METHOD_KEY;
+import static org.apache.hadoop.hbase.trace.HBaseSemanticAttributes.RPC_SERVICE_KEY;
 
 import io.opentelemetry.api.trace.Span;
 import io.opentelemetry.api.trace.StatusCode;
@@ -396,10 +400,10 @@ public abstract class AbstractRpcClient<T extends RpcConnection> implements RpcC
       final Message param, Message returnType, final User ticket, final Address addr,
       final RpcCallback<Message> callback) {
     Span span = TraceUtil.createClientSpan("RpcClient.callMethod")
-      .setAttribute(TraceUtil.RPC_SERVICE_KEY, md.getService().getName())
-      .setAttribute(TraceUtil.RPC_METHOD_KEY, md.getName())
-      .setAttribute(TraceUtil.REMOTE_HOST_KEY, addr.getHostName())
-      .setAttribute(TraceUtil.REMOTE_PORT_KEY, addr.getPort());
+      .setAttribute(RPC_SERVICE_KEY, md.getService().getName())
+      .setAttribute(RPC_METHOD_KEY, md.getName())
+      .setAttribute(REMOTE_HOST_KEY, addr.getHostName())
+      .setAttribute(REMOTE_PORT_KEY, addr.getPort());
     try (Scope scope = span.makeCurrent()) {
       final MetricsConnection.CallStats cs = MetricsConnection.newCallStats();
      cs.setStartTime(EnvironmentEdgeManager.currentTime());
@@ -33,7 +33,7 @@ import org.apache.hadoop.hbase.Waiter;
 import org.apache.hadoop.hbase.security.UserProvider;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
-import org.apache.hadoop.hbase.trace.TraceUtil;
+import org.apache.hadoop.hbase.trace.HBaseSemanticAttributes;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.ClassRule;
@@ -87,7 +87,9 @@ public class TestAsyncConnectionTracing {
       .filter(s -> s.getName().equals("AsyncConnection." + methodName)).findFirst().get();
     assertEquals(StatusCode.OK, data.getStatus().getStatusCode());
     if (serverName != null) {
-      assertEquals(serverName.getServerName(), data.getAttributes().get(TraceUtil.SERVER_NAME_KEY));
+      assertEquals(
+        serverName.getServerName(),
+        data.getAttributes().get(HBaseSemanticAttributes.SERVER_NAME_KEY));
     }
   }
 
@@ -38,7 +38,7 @@ import org.apache.hadoop.hbase.Waiter;
 import org.apache.hadoop.hbase.security.UserProvider;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
-import org.apache.hadoop.hbase.trace.TraceUtil;
+import org.apache.hadoop.hbase.trace.HBaseSemanticAttributes;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.ClassRule;
@@ -107,7 +107,7 @@ public class TestAsyncRegionLocatorTracing {
     conn.getLocator().clearCache(sn);
     SpanData span = waitSpan("AsyncRegionLocator.clearCache");
     assertEquals(StatusCode.OK, span.getStatus().getStatusCode());
-    assertEquals(sn.toString(), span.getAttributes().get(TraceUtil.SERVER_NAME_KEY));
+    assertEquals(sn.toString(), span.getAttributes().get(HBaseSemanticAttributes.SERVER_NAME_KEY));
   }
 
   @Test
@@ -116,9 +116,9 @@ public class TestAsyncRegionLocatorTracing {
     SpanData span = waitSpan("AsyncRegionLocator.clearCache");
     assertEquals(StatusCode.OK, span.getStatus().getStatusCode());
     assertEquals(TableName.META_TABLE_NAME.getNamespaceAsString(),
-      span.getAttributes().get(TraceUtil.NAMESPACE_KEY));
+      span.getAttributes().get(HBaseSemanticAttributes.NAMESPACE_KEY));
     assertEquals(TableName.META_TABLE_NAME.getNameAsString(),
-      span.getAttributes().get(TraceUtil.TABLE_KEY));
+      span.getAttributes().get(HBaseSemanticAttributes.TABLE_KEY));
   }
 
   @Test
@@ -128,10 +128,10 @@ public class TestAsyncRegionLocatorTracing {
     SpanData span = waitSpan("AsyncRegionLocator.getRegionLocation");
     assertEquals(StatusCode.OK, span.getStatus().getStatusCode());
     assertEquals(TableName.META_TABLE_NAME.getNamespaceAsString(),
-      span.getAttributes().get(TraceUtil.NAMESPACE_KEY));
+      span.getAttributes().get(HBaseSemanticAttributes.NAMESPACE_KEY));
     assertEquals(TableName.META_TABLE_NAME.getNameAsString(),
-      span.getAttributes().get(TraceUtil.TABLE_KEY));
-    List<String> regionNames = span.getAttributes().get(TraceUtil.REGION_NAMES_KEY);
+      span.getAttributes().get(HBaseSemanticAttributes.TABLE_KEY));
+    List<String> regionNames = span.getAttributes().get(HBaseSemanticAttributes.REGION_NAMES_KEY);
     assertEquals(1, regionNames.size());
     assertEquals(locs.getDefaultRegionLocation().getRegion().getRegionNameAsString(),
       regionNames.get(0));
@@ -144,10 +144,10 @@ public class TestAsyncRegionLocatorTracing {
     SpanData span = waitSpan("AsyncRegionLocator.getRegionLocations");
     assertEquals(StatusCode.OK, span.getStatus().getStatusCode());
     assertEquals(TableName.META_TABLE_NAME.getNamespaceAsString(),
-      span.getAttributes().get(TraceUtil.NAMESPACE_KEY));
+      span.getAttributes().get(HBaseSemanticAttributes.NAMESPACE_KEY));
     assertEquals(TableName.META_TABLE_NAME.getNameAsString(),
-      span.getAttributes().get(TraceUtil.TABLE_KEY));
-    List<String> regionNames = span.getAttributes().get(TraceUtil.REGION_NAMES_KEY);
+      span.getAttributes().get(HBaseSemanticAttributes.TABLE_KEY));
+    List<String> regionNames = span.getAttributes().get(HBaseSemanticAttributes.REGION_NAMES_KEY);
     assertEquals(3, regionNames.size());
     for (int i = 0; i < 3; i++) {
       assertEquals(locs.getRegionLocation(i).getRegion().getRegionNameAsString(),
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.hbase.client;
 
+import static org.apache.hadoop.hbase.trace.HBaseSemanticAttributes.NAMESPACE_KEY;
+import static org.apache.hadoop.hbase.trace.HBaseSemanticAttributes.TABLE_KEY;
 import static org.junit.Assert.assertEquals;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.ArgumentMatchers.anyInt;
@@ -47,7 +49,6 @@ import org.apache.hadoop.hbase.ipc.HBaseRpcController;
 import org.apache.hadoop.hbase.security.UserProvider;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
-import org.apache.hadoop.hbase.trace.TraceUtil;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.After;
 import org.junit.Before;
@@ -227,9 +228,8 @@ public class TestAsyncTableTracing {
       .filter(s -> s.getName().equals("AsyncTable." + methodName)).findFirst().get();
     assertEquals(StatusCode.OK, data.getStatus().getStatusCode());
     TableName tableName = table.getName();
-    assertEquals(tableName.getNamespaceAsString(),
-      data.getAttributes().get(TraceUtil.NAMESPACE_KEY));
-    assertEquals(tableName.getNameAsString(), data.getAttributes().get(TraceUtil.TABLE_KEY));
+    assertEquals(tableName.getNamespaceAsString(), data.getAttributes().get(NAMESPACE_KEY));
+    assertEquals(tableName.getNameAsString(), data.getAttributes().get(TABLE_KEY));
   }
 
   @Test
@@ -23,6 +23,7 @@ import io.opentelemetry.api.trace.SpanKind;
 import io.opentelemetry.api.trace.StatusCode;
 import io.opentelemetry.sdk.testing.junit4.OpenTelemetryRule;
 import io.opentelemetry.sdk.trace.data.SpanData;
+import java.util.Objects;
 import java.util.Optional;
 import java.util.concurrent.CompletableFuture;
 import org.apache.hadoop.conf.Configuration;
@@ -33,7 +34,7 @@ import org.apache.hadoop.hbase.RegionLocations;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.Waiter;
-import org.apache.hadoop.hbase.trace.TraceUtil;
+import org.apache.hadoop.hbase.trace.HBaseSemanticAttributes;
 import org.junit.Before;
 import org.junit.ClassRule;
 
@@ -68,16 +69,20 @@ public class TestTracingBase {
 
     if (serverName != null) {
       Optional<SpanData> foundServerName =
-        TRACE_RULE.getSpans().stream().filter(s -> s.getName().equals(expectedSpanName)).filter(
-          s -> serverName.getServerName().equals(s.getAttributes().get(TraceUtil.SERVER_NAME_KEY)))
+        TRACE_RULE.getSpans().stream()
+          .filter(s -> s.getName().equals(expectedSpanName))
+          .filter(s -> Objects.equals(
+            serverName.getServerName(),
+            s.getAttributes().get(HBaseSemanticAttributes.SERVER_NAME_KEY)))
           .findAny();
       assertTrue(foundServerName.isPresent());
     }
 
     if (tableName != null) {
       assertEquals(tableName.getNamespaceAsString(),
-        data.getAttributes().get(TraceUtil.NAMESPACE_KEY));
-      assertEquals(tableName.getNameAsString(), data.getAttributes().get(TraceUtil.TABLE_KEY));
+        data.getAttributes().get(HBaseSemanticAttributes.NAMESPACE_KEY));
+      assertEquals(tableName.getNameAsString(),
+        data.getAttributes().get(HBaseSemanticAttributes.TABLE_KEY));
     }
   }
 
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.trace;
+
+import io.opentelemetry.api.common.AttributeKey;
+import io.opentelemetry.semconv.trace.attributes.SemanticAttributes;
+import java.util.List;
+import org.apache.yetus.audience.InterfaceAudience;
+
+/**
+ * The constants in this class correspond with the guidance outlined by the OpenTelemetry
+ * <a href="https://github.com/open-telemetry/opentelemetry-specification/tree/main/specification/trace/semantic_conventions">Semantic Conventions</a>.
+ */
+@InterfaceAudience.Private
+public final class HBaseSemanticAttributes {
+  public static final AttributeKey<String> NAMESPACE_KEY = SemanticAttributes.DB_HBASE_NAMESPACE;
+  public static final AttributeKey<String> TABLE_KEY = AttributeKey.stringKey("db.hbase.table");
+  public static final AttributeKey<List<String>> REGION_NAMES_KEY =
+    AttributeKey.stringArrayKey("db.hbase.regions");
+  public static final AttributeKey<String> RPC_SERVICE_KEY =
+    AttributeKey.stringKey("db.hbase.rpc.service");
+  public static final AttributeKey<String> RPC_METHOD_KEY =
+    AttributeKey.stringKey("db.hbase.rpc.method");
+  public static final AttributeKey<String> SERVER_NAME_KEY =
+    AttributeKey.stringKey("db.hbase.server.name");
+  public static final AttributeKey<String> REMOTE_HOST_KEY = SemanticAttributes.NET_PEER_NAME;
+  public static final AttributeKey<Long> REMOTE_PORT_KEY = SemanticAttributes.NET_PEER_PORT;
+  public static final AttributeKey<Boolean> ROW_LOCK_READ_LOCK_KEY =
+    AttributeKey.booleanKey("db.hbase.rowlock.readlock");
+  public static final AttributeKey<String> WAL_IMPL = AttributeKey.stringKey("db.hbase.wal.impl");
+
+  private HBaseSemanticAttributes() { }
+}
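For reference, a hedged usage sketch of the new constants, mirroring the client-side RPC call sites elsewhere in this diff; the wrapper class, method, and parameter names below are illustrative and not part of the commit:

import static org.apache.hadoop.hbase.trace.HBaseSemanticAttributes.REMOTE_HOST_KEY;
import static org.apache.hadoop.hbase.trace.HBaseSemanticAttributes.REMOTE_PORT_KEY;
import static org.apache.hadoop.hbase.trace.HBaseSemanticAttributes.RPC_METHOD_KEY;
import static org.apache.hadoop.hbase.trace.HBaseSemanticAttributes.RPC_SERVICE_KEY;

import io.opentelemetry.api.trace.Span;
import io.opentelemetry.context.Scope;
import org.apache.hadoop.hbase.trace.TraceUtil;

// Hypothetical helper, for illustration only: tag a client-side RPC span with the shared
// semantic attribute keys and run the call inside its scope.
public final class RpcClientTracingSketch {
  static void tracedCall(String service, String method, String host, int port, Runnable call) {
    Span span = TraceUtil.createClientSpan("RpcClient.callMethod")
      .setAttribute(RPC_SERVICE_KEY, service)
      .setAttribute(RPC_METHOD_KEY, method)
      .setAttribute(REMOTE_HOST_KEY, host)
      .setAttribute(REMOTE_PORT_KEY, port);
    try (Scope scope = span.makeCurrent()) {
      call.run(); // the traced work; the real async client completes the span in a callback
    } finally {
      span.end();
    }
  }

  private RpcClientTracingSketch() { }
}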
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements. See the NOTICE file
  * distributed with this work for additional information
@@ -17,15 +17,15 @@
  */
 package org.apache.hadoop.hbase.trace;
 
+import static org.apache.hadoop.hbase.trace.HBaseSemanticAttributes.NAMESPACE_KEY;
+import static org.apache.hadoop.hbase.trace.HBaseSemanticAttributes.TABLE_KEY;
 import io.opentelemetry.api.GlobalOpenTelemetry;
-import io.opentelemetry.api.common.AttributeKey;
 import io.opentelemetry.api.trace.Span;
 import io.opentelemetry.api.trace.SpanKind;
 import io.opentelemetry.api.trace.StatusCode;
 import io.opentelemetry.api.trace.Tracer;
 import io.opentelemetry.context.Context;
 import io.opentelemetry.context.Scope;
-import io.opentelemetry.semconv.trace.attributes.SemanticAttributes;
 import java.io.IOException;
 import java.util.List;
 import java.util.concurrent.CompletableFuture;
@@ -38,31 +38,6 @@ import org.apache.yetus.audience.InterfaceAudience;
 @InterfaceAudience.Private
 public final class TraceUtil {
 
-  public static final AttributeKey<String> NAMESPACE_KEY = SemanticAttributes.DB_HBASE_NAMESPACE;
-
-  public static final AttributeKey<String> TABLE_KEY = AttributeKey.stringKey("db.hbase.table");
-
-  public static final AttributeKey<List<String>> REGION_NAMES_KEY =
-    AttributeKey.stringArrayKey("db.hbase.regions");
-
-  public static final AttributeKey<String> RPC_SERVICE_KEY =
-    AttributeKey.stringKey("db.hbase.rpc.service");
-
-  public static final AttributeKey<String> RPC_METHOD_KEY =
-    AttributeKey.stringKey("db.hbase.rpc.method");
-
-  public static final AttributeKey<String> SERVER_NAME_KEY =
-    AttributeKey.stringKey("db.hbase.server.name");
-
-  public static final AttributeKey<String> REMOTE_HOST_KEY = SemanticAttributes.NET_PEER_NAME;
-
-  public static final AttributeKey<Long> REMOTE_PORT_KEY = SemanticAttributes.NET_PEER_PORT;
-
-  public static final AttributeKey<Boolean> ROW_LOCK_READ_LOCK_KEY =
-    AttributeKey.booleanKey("db.hbase.rowlock.readlock");
-
-  public static final AttributeKey<String> WAL_IMPL = AttributeKey.stringKey("db.hbase.wal.impl");
-
   private TraceUtil() {
   }
 
@@ -81,7 +56,8 @@ public final class TraceUtil {
    * Create a {@link SpanKind#INTERNAL} span and set table related attributes.
    */
   public static Span createTableSpan(String spanName, TableName tableName) {
-    return createSpan(spanName).setAttribute(NAMESPACE_KEY, tableName.getNamespaceAsString())
+    return createSpan(spanName)
+      .setAttribute(NAMESPACE_KEY, tableName.getNamespaceAsString())
       .setAttribute(TABLE_KEY, tableName.getNameAsString());
   }
 
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements. See the NOTICE file
  * distributed with this work for additional information
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.hbase.ipc;
 
+import static org.apache.hadoop.hbase.trace.HBaseSemanticAttributes.RPC_METHOD_KEY;
+import static org.apache.hadoop.hbase.trace.HBaseSemanticAttributes.RPC_SERVICE_KEY;
 import io.opentelemetry.api.trace.Span;
 import io.opentelemetry.api.trace.StatusCode;
 import io.opentelemetry.context.Context;
@@ -124,8 +126,8 @@ public class CallRunner {
       String methodName = getMethodName();
       Span span = TraceUtil.getGlobalTracer().spanBuilder("RpcServer.callMethod")
         .setParent(Context.current().with(((ServerCall<?>) call).getSpan())).startSpan()
-        .setAttribute(TraceUtil.RPC_SERVICE_KEY, serviceName)
-        .setAttribute(TraceUtil.RPC_METHOD_KEY, methodName);
+        .setAttribute(RPC_SERVICE_KEY, serviceName)
+        .setAttribute(RPC_METHOD_KEY, methodName);
       try (Scope traceScope = span.makeCurrent()) {
         if (!this.rpcServer.isStarted()) {
           InetSocketAddress address = rpcServer.getListenerAddress();
@@ -19,6 +19,8 @@ package org.apache.hadoop.hbase.regionserver;
 
 import static org.apache.hadoop.hbase.HConstants.REPLICATION_SCOPE_LOCAL;
 import static org.apache.hadoop.hbase.regionserver.HStoreFile.MAJOR_COMPACTION_KEY;
+import static org.apache.hadoop.hbase.trace.HBaseSemanticAttributes.REGION_NAMES_KEY;
+import static org.apache.hadoop.hbase.trace.HBaseSemanticAttributes.ROW_LOCK_READ_LOCK_KEY;
 import static org.apache.hadoop.hbase.util.ConcurrentMapUtils.computeIfAbsent;
 
 import edu.umd.cs.findbugs.annotations.Nullable;
@@ -6587,8 +6589,8 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
   }
 
   Span createRegionSpan(String name) {
-    return TraceUtil.createSpan(name).setAttribute(TraceUtil.REGION_NAMES_KEY,
-      Arrays.asList(getRegionInfo().getRegionNameAsString()));
+    return TraceUtil.createSpan(name).setAttribute(REGION_NAMES_KEY,
+      Collections.singletonList(getRegionInfo().getRegionNameAsString()));
   }
 
   // will be override in tests
@@ -6675,7 +6677,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
   private RowLock getRowLock(byte[] row, boolean readLock, final RowLock prevRowLock)
       throws IOException {
     return TraceUtil.trace(() -> getRowLockInternal(row, readLock, prevRowLock),
-      () -> createRegionSpan("Region.getRowLock").setAttribute(TraceUtil.ROW_LOCK_READ_LOCK_KEY,
+      () -> createRegionSpan("Region.getRowLock").setAttribute(ROW_LOCK_READ_LOCK_KEY,
         readLock));
   }
 
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements. See the NOTICE file
  * distributed with this work for additional information
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.regionserver.wal;
 import static org.apache.hadoop.hbase.regionserver.wal.WALActionsListener.RollRequestReason.ERROR;
 import static org.apache.hadoop.hbase.regionserver.wal.WALActionsListener.RollRequestReason.LOW_REPLICATION;
 import static org.apache.hadoop.hbase.regionserver.wal.WALActionsListener.RollRequestReason.SLOW_SYNC;
+import static org.apache.hadoop.hbase.trace.HBaseSemanticAttributes.WAL_IMPL;
 import static org.apache.hadoop.hbase.wal.AbstractFSWALProvider.WAL_FILE_NAME_DELIMITER;
 import static org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkArgument;
 import static org.apache.hbase.thirdparty.com.google.common.base.Preconditions.checkNotNull;
@@ -797,7 +798,7 @@ public abstract class AbstractFSWAL<W extends WriterBase> implements WAL {
   }
 
   private Span createSpan(String name) {
-    return TraceUtil.createSpan(name).setAttribute(TraceUtil.WAL_IMPL, implClassName);
+    return TraceUtil.createSpan(name).setAttribute(WAL_IMPL, implClassName);
   }
 
   /**
@@ -53,7 +53,7 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.Server;
 import org.apache.hadoop.hbase.Waiter;
 import org.apache.hadoop.hbase.ipc.RpcServer.BlockingServiceAndInterface;
-import org.apache.hadoop.hbase.trace.TraceUtil;
+import org.apache.hadoop.hbase.trace.HBaseSemanticAttributes;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.io.compress.GzipCodec;
 import org.apache.hadoop.util.StringUtils;
@@ -459,11 +459,15 @@ public abstract class AbstractTestIPC {
   private void assertRpcAttribute(SpanData data, String methodName, InetSocketAddress addr,
       SpanKind kind) {
     assertEquals(SERVICE.getDescriptorForType().getName(),
-      data.getAttributes().get(TraceUtil.RPC_SERVICE_KEY));
-    assertEquals(methodName, data.getAttributes().get(TraceUtil.RPC_METHOD_KEY));
+      data.getAttributes().get(HBaseSemanticAttributes.RPC_SERVICE_KEY));
+    assertEquals(methodName, data.getAttributes().get(HBaseSemanticAttributes.RPC_METHOD_KEY));
     if (addr != null) {
-      assertEquals(addr.getHostName(), data.getAttributes().get(TraceUtil.REMOTE_HOST_KEY));
-      assertEquals(addr.getPort(), data.getAttributes().get(TraceUtil.REMOTE_PORT_KEY).intValue());
+      assertEquals(
+        addr.getHostName(),
+        data.getAttributes().get(HBaseSemanticAttributes.REMOTE_HOST_KEY));
+      assertEquals(
+        addr.getPort(),
+        data.getAttributes().get(HBaseSemanticAttributes.REMOTE_PORT_KEY).intValue());
     }
     assertEquals(kind, data.getKind());
   }
@@ -43,7 +43,7 @@ import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
-import org.apache.hadoop.hbase.trace.TraceUtil;
+import org.apache.hadoop.hbase.trace.HBaseSemanticAttributes;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.wal.WAL;
 import org.junit.After;
@@ -115,7 +115,7 @@ public class TestHRegionTracing {
       if (!span.getName().equals(spanName)) {
         return false;
       }
-      List<String> regionNames = span.getAttributes().get(TraceUtil.REGION_NAMES_KEY);
+      List<String> regionNames = span.getAttributes().get(HBaseSemanticAttributes.REGION_NAMES_KEY);
       return regionNames != null && regionNames.size() == 1 &&
         regionNames.get(0).equals(region.getRegionInfo().getRegionNameAsString());
     }));