HBASE-25591 Upgrade opentelemetry to 0.17.1 (#2971)

Signed-off-by: Guanghao Zhang <zghao@apache.org>

parent bb8c4967f8
commit f6ff519dd0
@@ -24,7 +24,7 @@ import static org.mockito.ArgumentMatchers.anyLong;
 import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.mock;
 
-import io.opentelemetry.api.trace.Span.Kind;
+import io.opentelemetry.api.trace.SpanKind;
 import io.opentelemetry.api.trace.StatusCode;
 import io.opentelemetry.sdk.testing.junit4.OpenTelemetryRule;
 import io.opentelemetry.sdk.trace.data.SpanData;
@@ -222,7 +222,7 @@ public class TestAsyncTableTracing {
     Waiter.waitFor(CONF, 1000,
       () -> traceRule.getSpans().stream()
         .anyMatch(span -> span.getName().equals("AsyncTable." + methodName) &&
-          span.getKind() == Kind.INTERNAL && span.hasEnded()));
+          span.getKind() == SpanKind.INTERNAL && span.hasEnded()));
     SpanData data = traceRule.getSpans().stream()
       .filter(s -> s.getName().equals("AsyncTable." + methodName)).findFirst().get();
     assertEquals(StatusCode.OK, data.getStatus().getStatusCode());
@@ -406,7 +406,7 @@ public class TestAsyncTableTracing {
     Waiter.waitFor(CONF, 1000,
       () -> traceRule.getSpans().stream()
         .anyMatch(span -> span.getName().equals("AsyncConnection.close") &&
-          span.getKind() == Kind.INTERNAL && span.hasEnded()));
+          span.getKind() == SpanKind.INTERNAL && span.hasEnded()));
     SpanData data = traceRule.getSpans().stream()
       .filter(s -> s.getName().equals("AsyncConnection.close")).findFirst().get();
     assertEquals(StatusCode.OK, data.getStatus().getStatusCode());
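
For context, a minimal JUnit4 sketch (not part of this commit) of the in-memory tracing setup that tests like the one above rely on. The class and test names are hypothetical; OpenTelemetryRule, SpanData, and getSpans() come from the opentelemetry-sdk-testing artifact referenced in this diff.

```java
import io.opentelemetry.sdk.testing.junit4.OpenTelemetryRule;
import io.opentelemetry.sdk.trace.data.SpanData;
import java.util.List;
import org.junit.Rule;
import org.junit.Test;

public class TracingRuleSketch {
  // Installs an in-memory span exporter per test; finished spans can then be
  // read back through getSpans() instead of being shipped to a real backend.
  @Rule
  public final OpenTelemetryRule traceRule = OpenTelemetryRule.create();

  @Test
  public void readsBackSpans() {
    // ... exercise code under test that creates spans here ...
    List<SpanData> spans = traceRule.getSpans();
    // Assert on span name, kind (SpanKind.INTERNAL, CLIENT, ...) and status,
    // in the same way the hunks above match "AsyncTable.<method>" spans.
    spans.forEach(span -> System.out.println(span.getName() + " " + span.getKind()));
  }
}
```
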
@@ -196,6 +196,10 @@
       <groupId>io.opentelemetry</groupId>
       <artifactId>opentelemetry-api</artifactId>
     </dependency>
+    <dependency>
+      <groupId>io.opentelemetry</groupId>
+      <artifactId>opentelemetry-semconv</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.apache.commons</groupId>
       <artifactId>commons-crypto</artifactId>
@@ -20,12 +20,12 @@ package org.apache.hadoop.hbase.trace;
 import io.opentelemetry.api.GlobalOpenTelemetry;
 import io.opentelemetry.api.common.AttributeKey;
 import io.opentelemetry.api.trace.Span;
-import io.opentelemetry.api.trace.Span.Kind;
+import io.opentelemetry.api.trace.SpanKind;
 import io.opentelemetry.api.trace.StatusCode;
 import io.opentelemetry.api.trace.Tracer;
-import io.opentelemetry.api.trace.attributes.SemanticAttributes;
 import io.opentelemetry.context.Context;
 import io.opentelemetry.context.Scope;
+import io.opentelemetry.semconv.trace.attributes.SemanticAttributes;
 import java.io.IOException;
 import java.util.List;
 import java.util.concurrent.CompletableFuture;
@@ -72,14 +72,14 @@ public final class TraceUtil {
   }
 
   /**
-   * Create a {@link Kind#INTERNAL} span.
+   * Create a {@link SpanKind#INTERNAL} span.
    */
   public static Span createSpan(String name) {
-    return createSpan(name, Kind.INTERNAL);
+    return createSpan(name, SpanKind.INTERNAL);
   }
 
   /**
-   * Create a {@link Kind#INTERNAL} span and set table related attributes.
+   * Create a {@link SpanKind#INTERNAL} span and set table related attributes.
    */
   public static Span createTableSpan(String spanName, TableName tableName) {
     return createSpan(spanName).setAttribute(NAMESPACE_KEY, tableName.getNamespaceAsString())
@@ -88,28 +88,29 @@
 
   /**
    * Create a span with the given {@code kind}. Notice that, OpenTelemetry only expects one
-   * {@link Kind#CLIENT} span and one {@link Kind#SERVER} span for a traced request, so use this
-   * with caution when you want to create spans with kind other than {@link Kind#INTERNAL}.
+   * {@link SpanKind#CLIENT} span and one {@link SpanKind#SERVER} span for a traced request, so use
+   * this with caution when you want to create spans with kind other than {@link SpanKind#INTERNAL}.
    */
-  private static Span createSpan(String name, Kind kind) {
+  private static Span createSpan(String name, SpanKind kind) {
     return getGlobalTracer().spanBuilder(name).setSpanKind(kind).startSpan();
   }
 
   /**
    * Create a span which parent is from remote, i.e, passed through rpc.
    * </p>
-   * We will set the kind of the returned span to {@link Kind#SERVER}, as this should be the top
+   * We will set the kind of the returned span to {@link SpanKind#SERVER}, as this should be the top
    * most span at server side.
    */
   public static Span createRemoteSpan(String name, Context ctx) {
-    return getGlobalTracer().spanBuilder(name).setParent(ctx).setSpanKind(Kind.SERVER).startSpan();
+    return getGlobalTracer().spanBuilder(name).setParent(ctx).setSpanKind(SpanKind.SERVER)
+      .startSpan();
   }
 
   /**
-   * Create a span with {@link Kind#CLIENT}.
+   * Create a span with {@link SpanKind#CLIENT}.
    */
   public static Span createClientSpan(String name) {
-    return createSpan(name, Kind.CLIENT);
+    return createSpan(name, SpanKind.CLIENT);
   }
 
   /**
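
As a usage sketch (not part of this commit, class and span names hypothetical): a caller of the updated TraceUtil would typically scope and close the span along these lines, with the SpanKind now supplied by the helper itself.

```java
import io.opentelemetry.api.trace.Span;
import io.opentelemetry.api.trace.StatusCode;
import io.opentelemetry.context.Scope;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.trace.TraceUtil;

public final class TraceUtilUsageSketch {
  // Hypothetical caller: run some work under an "AsyncTable.get"-style span.
  static void tracedGet(TableName tableName, Runnable work) {
    Span span = TraceUtil.createTableSpan("AsyncTable.get", tableName);
    // makeCurrent() puts the span into the current Context for downstream code.
    try (Scope ignored = span.makeCurrent()) {
      work.run();
      span.setStatus(StatusCode.OK);
    } catch (Throwable t) {
      span.setStatus(StatusCode.ERROR);
      throw t;
    } finally {
      span.end();
    }
  }
}
```
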
@@ -54,7 +54,6 @@ import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.RetriesExhaustedException;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorException;
 import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
@@ -378,7 +377,7 @@ public class IntegrationTestMTTR {
     public void addResult(long time, Span span) {
       stats.addValue(TimeUnit.MILLISECONDS.convert(time, TimeUnit.NANOSECONDS));
       if (TimeUnit.SECONDS.convert(time, TimeUnit.NANOSECONDS) >= 1) {
-        traces.add(span.getSpanContext().getTraceIdAsHexString());
+        traces.add(span.getSpanContext().getTraceId());
       }
     }
 
@@ -23,7 +23,7 @@ import io.opentelemetry.api.GlobalOpenTelemetry;
 import io.opentelemetry.api.trace.Span;
 import io.opentelemetry.context.Context;
 import io.opentelemetry.context.Scope;
-import io.opentelemetry.context.propagation.TextMapPropagator;
+import io.opentelemetry.context.propagation.TextMapGetter;
 import java.io.ByteArrayInputStream;
 import java.io.Closeable;
 import java.io.DataOutputStream;
@@ -615,7 +615,7 @@ abstract class ServerRpcConnection implements Closeable {
       ProtobufUtil.mergeFrom(builder, cis, headerSize);
       RequestHeader header = (RequestHeader) builder.build();
       offset += headerSize;
-      TextMapPropagator.Getter<RPCTInfo> getter = new TextMapPropagator.Getter<RPCTInfo>() {
+      TextMapGetter<RPCTInfo> getter = new TextMapGetter<RPCTInfo>() {
 
         @Override
         public Iterable<String> keys(RPCTInfo carrier) {
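
For reference, a minimal sketch (not part of this commit) of how a TextMapGetter is consumed on the extract side in the 0.17 API; the Map carrier and class name here are hypothetical stand-ins for the RPCTInfo carrier used above.

```java
import io.opentelemetry.api.GlobalOpenTelemetry;
import io.opentelemetry.context.Context;
import io.opentelemetry.context.propagation.TextMapGetter;
import java.util.Map;

public final class TextMapGetterSketch {
  // TextMapGetter replaces the old TextMapPropagator.Getter nested interface:
  // it tells the propagator how to read string key/value pairs off a carrier.
  private static final TextMapGetter<Map<String, String>> GETTER =
    new TextMapGetter<Map<String, String>>() {
      @Override
      public Iterable<String> keys(Map<String, String> carrier) {
        return carrier.keySet();
      }

      @Override
      public String get(Map<String, String> carrier, String key) {
        return carrier == null ? null : carrier.get(key);
      }
    };

  static Context extract(Map<String, String> carrier) {
    // Pull the remote trace context out of the carrier's headers.
    return GlobalOpenTelemetry.getPropagators().getTextMapPropagator()
      .extract(Context.current(), carrier, GETTER);
  }
}
```
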
@@ -34,7 +34,7 @@ import static org.mockito.Mockito.spy;
 import static org.mockito.Mockito.verify;
 import static org.mockito.internal.verification.VerificationModeFactory.times;
 
-import io.opentelemetry.api.trace.Span.Kind;
+import io.opentelemetry.api.trace.SpanKind;
 import io.opentelemetry.api.trace.StatusCode;
 import io.opentelemetry.sdk.testing.junit4.OpenTelemetryRule;
 import io.opentelemetry.sdk.trace.data.SpanData;
@@ -457,7 +457,7 @@ public abstract class AbstractTestIPC {
   }
 
   private void assertRpcAttribute(SpanData data, String methodName, InetSocketAddress addr,
-      Kind kind) {
+      SpanKind kind) {
     assertEquals(SERVICE.getDescriptorForType().getName(),
       data.getAttributes().get(TraceUtil.RPC_SERVICE_KEY));
     assertEquals(methodName, data.getAttributes().get(TraceUtil.RPC_METHOD_KEY));
@@ -471,7 +471,7 @@ public abstract class AbstractTestIPC {
   private void assertRemoteSpan() {
     SpanData data = waitSpan("RpcServer.process");
     assertTrue(data.getParentSpanContext().isRemote());
-    assertEquals(Kind.SERVER, data.getKind());
+    assertEquals(SpanKind.SERVER, data.getKind());
   }
 
   @Test
@@ -484,8 +484,8 @@ public abstract class AbstractTestIPC {
     BlockingInterface stub = newBlockingStub(client, rpcServer.getListenerAddress());
     stub.pause(null, PauseRequestProto.newBuilder().setMs(100).build());
     assertRpcAttribute(waitSpan("RpcClient.callMethod"), "pause", rpcServer.getListenerAddress(),
-      Kind.CLIENT);
-    assertRpcAttribute(waitSpan("RpcServer.callMethod"), "pause", null, Kind.INTERNAL);
+      SpanKind.CLIENT);
+    assertRpcAttribute(waitSpan("RpcServer.callMethod"), "pause", null, SpanKind.INTERNAL);
     assertRemoteSpan();
     assertSameTraceId();
     for (SpanData data : traceRule.getSpans()) {
@@ -499,8 +499,8 @@ public abstract class AbstractTestIPC {
     assertThrows(ServiceException.class,
       () -> stub.error(null, EmptyRequestProto.getDefaultInstance()));
     assertRpcAttribute(waitSpan("RpcClient.callMethod"), "error", rpcServer.getListenerAddress(),
-      Kind.CLIENT);
-    assertRpcAttribute(waitSpan("RpcServer.callMethod"), "error", null, Kind.INTERNAL);
+      SpanKind.CLIENT);
+    assertRpcAttribute(waitSpan("RpcServer.callMethod"), "error", null, SpanKind.INTERNAL);
     assertRemoteSpan();
     assertSameTraceId();
     for (SpanData data : traceRule.getSpans()) {
@@ -23,6 +23,7 @@ import io.opentelemetry.sdk.testing.junit4.OpenTelemetryRule;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.TableName;
@@ -48,7 +49,6 @@ import org.apache.hadoop.hbase.wal.WAL;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
-import org.junit.BeforeClass;
 import org.junit.ClassRule;
 import org.junit.Rule;
 import org.junit.Test;
@@ -79,18 +79,12 @@ public class TestHRegionTracing {
   @Rule
   public final TableNameTestRule tableNameRule = new TableNameTestRule();
 
-  private static WAL WAL;
+  private WAL wal;
 
   private HRegion region;
 
-  @BeforeClass
-  public static void setUpBeforeClass() throws IOException {
-    WAL = HBaseTestingUtility.createWal(UTIL.getConfiguration(), UTIL.getDataTestDir(), null);
-  }
-
   @AfterClass
   public static void tearDownAfterClass() throws IOException {
-    Closeables.close(WAL, true);
     UTIL.cleanupTestDir();
   }
 
@@ -102,7 +96,9 @@ public class TestHRegionTracing {
     RegionInfo info = RegionInfoBuilder.newBuilder(tableName).build();
     ChunkCreator.initialize(MemStoreLAB.CHUNK_SIZE_DEFAULT, false, 0, 0, 0, null,
       MemStoreLAB.INDEX_CHUNK_SIZE_PERCENTAGE_DEFAULT);
-    region = HRegion.createHRegion(info, UTIL.getDataTestDir(), UTIL.getConfiguration(), desc, WAL);
+    wal = HBaseTestingUtility.createWal(UTIL.getConfiguration(),
+      new Path(UTIL.getDataTestDir(), tableName.getNameAsString()), null);
+    region = HRegion.createHRegion(info, UTIL.getDataTestDir(), UTIL.getConfiguration(), desc, wal);
     region = UTIL.createLocalHRegion(info, desc);
   }
 
@@ -111,6 +107,7 @@ public class TestHRegionTracing {
     if (region != null) {
       region.close();
     }
+    Closeables.close(wal, true);
   }
 
   private void assertSpan(String spanName) {
pom.xml (19 lines changed)
@@ -1717,8 +1717,8 @@
     <jruby.version>9.2.13.0</jruby.version>
     <junit.version>4.13</junit.version>
     <hamcrest.version>1.3</hamcrest.version>
-    <opentelemetry.version>0.13.1</opentelemetry.version>
-    <opentelemetry-instrumentation.version>0.13.0</opentelemetry-instrumentation.version>
+    <opentelemetry.version>0.17.1</opentelemetry.version>
+    <opentelemetry-javaagent.version>0.17.0</opentelemetry-javaagent.version>
    <log4j2.version>2.14.1</log4j2.version>
    <mockito-core.version>2.28.2</mockito-core.version>
    <protobuf.plugin.version>0.6.1</protobuf.plugin.version>
@@ -2395,23 +2395,20 @@
       </dependency>
       <dependency>
         <groupId>io.opentelemetry</groupId>
-        <artifactId>opentelemetry-api</artifactId>
+        <artifactId>opentelemetry-bom</artifactId>
         <version>${opentelemetry.version}</version>
+        <type>pom</type>
+        <scope>import</scope>
       </dependency>
       <dependency>
         <groupId>io.opentelemetry</groupId>
-        <artifactId>opentelemetry-sdk</artifactId>
-        <version>${opentelemetry.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>io.opentelemetry</groupId>
-        <artifactId>opentelemetry-sdk-testing</artifactId>
-        <version>${opentelemetry.version}</version>
+        <artifactId>opentelemetry-semconv</artifactId>
+        <version>${opentelemetry.version}-alpha</version>
       </dependency>
       <dependency>
         <groupId>io.opentelemetry.javaagent</groupId>
         <artifactId>opentelemetry-javaagent</artifactId>
-        <version>${opentelemetry.version}</version>
+        <version>${opentelemetry-javaagent.version}</version>
         <classifier>all</classifier>
       </dependency>
       <dependency>