diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
index 7b83fad4f31..2f870b7fd3b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
@@ -30,7 +30,6 @@
 import com.google.protobuf.ServiceException;
 import io.opentelemetry.api.trace.Span;
 import io.opentelemetry.api.trace.SpanKind;
-import io.opentelemetry.api.trace.StatusCode;
 import io.opentelemetry.context.Scope;
 import java.io.IOException;
 import java.io.InterruptedIOException;
@@ -407,8 +406,7 @@ public class HTable implements Table {
   public Result[] get(List gets) throws IOException {
     final Supplier supplier = new TableOperationSpanBuilder(connection)
       .setTableName(tableName)
-      .setOperation(HBaseSemanticAttributes.Operation.BATCH)
-      .setContainerOperations(gets);
+      .setOperation(HBaseSemanticAttributes.Operation.BATCH);
     return TraceUtil.trace(() -> {
       if (gets.size() == 1) {
         return new Result[] { get(gets.get(0)) };
@@ -435,8 +433,7 @@ public class HTable implements Table {
     throws InterruptedException, IOException {
     final Supplier supplier = new TableOperationSpanBuilder(connection)
       .setTableName(tableName)
-      .setOperation(HBaseSemanticAttributes.Operation.BATCH)
-      .setContainerOperations(actions);
+      .setOperation(HBaseSemanticAttributes.Operation.BATCH);
     TraceUtil.traceWithIOException(() -> {
       int rpcTimeout = writeRpcTimeoutMs;
       boolean hasRead = false;
@@ -476,7 +473,6 @@ public class HTable implements Table {
     final Span span = new TableOperationSpanBuilder(connection)
       .setTableName(tableName)
       .setOperation(HBaseSemanticAttributes.Operation.BATCH)
-      .setContainerOperations(actions)
       .build();
     try (Scope ignored = span.makeCurrent()) {
       AsyncRequestFuture ars = multiAp.submit(task);
@@ -485,7 +481,6 @@ public class HTable implements Table {
         TraceUtil.setError(span, ars.getErrors());
         throw ars.getErrors();
       }
-      span.setStatus(StatusCode.OK);
     } finally {
       span.end();
     }
@@ -517,7 +512,6 @@ public class HTable implements Table {
     final Span span = new TableOperationSpanBuilder(connection)
       .setTableName(tableName)
       .setOperation(HBaseSemanticAttributes.Operation.BATCH)
-      .setContainerOperations(actions)
       .build();
     try (Scope ignored = span.makeCurrent()) {
       AsyncRequestFuture ars = connection.getAsyncProcess().submit(task);
@@ -557,8 +551,7 @@ public class HTable implements Table {
   public void delete(final List deletes) throws IOException {
     final Supplier supplier = new TableOperationSpanBuilder(connection)
       .setTableName(tableName)
-      .setOperation(HBaseSemanticAttributes.Operation.BATCH)
-      .setContainerOperations(deletes);
+      .setOperation(HBaseSemanticAttributes.Operation.BATCH);
     TraceUtil.traceWithIOException(() -> {
       Object[] results = new Object[deletes.size()];
       try {
@@ -607,8 +600,7 @@ public class HTable implements Table {
   public void put(final List puts) throws IOException {
     final Supplier supplier = new TableOperationSpanBuilder(connection)
       .setTableName(tableName)
-      .setOperation(HBaseSemanticAttributes.Operation.BATCH)
-      .setContainerOperations(puts);
+      .setOperation(HBaseSemanticAttributes.Operation.BATCH);
     TraceUtil.traceWithIOException(() -> {
       for (Put put : puts) {
         validatePut(put);
@@ -626,8 +618,7 @@ public class HTable implements Table {
   public Result mutateRow(final RowMutations rm) throws IOException {
     final Supplier supplier = new TableOperationSpanBuilder(connection)
       .setTableName(tableName)
-      .setOperation(HBaseSemanticAttributes.Operation.BATCH)
-      .setContainerOperations(rm);
+      .setOperation(HBaseSemanticAttributes.Operation.BATCH);
     return TraceUtil.trace(() -> {
       long nonceGroup = getNonceGroup();
       long nonce = getNonce();
@@ -782,8 +773,7 @@ public class HTable implements Table {
     final byte [] value, final Put put) throws IOException {
     final Supplier supplier = new TableOperationSpanBuilder(connection)
       .setTableName(tableName)
-      .setOperation(HBaseSemanticAttributes.Operation.CHECK_AND_MUTATE)
-      .setContainerOperations(HBaseSemanticAttributes.Operation.CHECK_AND_MUTATE, HBaseSemanticAttributes.Operation.PUT);
+      .setOperation(HBaseSemanticAttributes.Operation.CHECK_AND_MUTATE);
     return TraceUtil.trace(
       () -> doCheckAndMutate(row, family, qualifier, CompareOperator.EQUAL, value, null, null, put)
         .isSuccess(),
@@ -796,8 +786,7 @@ public class HTable implements Table {
     final CompareOp compareOp, final byte [] value, final Put put) throws IOException {
     final Supplier supplier = new TableOperationSpanBuilder(connection)
       .setTableName(tableName)
-      .setOperation(HBaseSemanticAttributes.Operation.CHECK_AND_MUTATE)
-      .setContainerOperations(HBaseSemanticAttributes.Operation.CHECK_AND_MUTATE, HBaseSemanticAttributes.Operation.PUT);
+      .setOperation(HBaseSemanticAttributes.Operation.CHECK_AND_MUTATE);
     return TraceUtil.trace(
       () -> doCheckAndMutate(row, family, qualifier, toCompareOperator(compareOp), value, null,
         null, put).isSuccess(),
@@ -810,8 +799,7 @@ public class HTable implements Table {
     final CompareOperator op, final byte [] value, final Put put) throws IOException {
     final Supplier supplier = new TableOperationSpanBuilder(connection)
       .setTableName(tableName)
-      .setOperation(HBaseSemanticAttributes.Operation.CHECK_AND_MUTATE)
-      .setContainerOperations(HBaseSemanticAttributes.Operation.CHECK_AND_MUTATE, HBaseSemanticAttributes.Operation.PUT);
+      .setOperation(HBaseSemanticAttributes.Operation.CHECK_AND_MUTATE);
     return TraceUtil.trace(
       () -> doCheckAndMutate(row, family, qualifier, op, value, null, null, put).isSuccess(),
       supplier);
@@ -823,8 +811,7 @@ public class HTable implements Table {
     final byte[] value, final Delete delete) throws IOException {
     final Supplier supplier = new TableOperationSpanBuilder(connection)
       .setTableName(tableName)
-      .setOperation(HBaseSemanticAttributes.Operation.CHECK_AND_MUTATE)
-      .setContainerOperations(HBaseSemanticAttributes.Operation.CHECK_AND_MUTATE, HBaseSemanticAttributes.Operation.DELETE);
+      .setOperation(HBaseSemanticAttributes.Operation.CHECK_AND_MUTATE);
     return TraceUtil.trace(
       () -> doCheckAndMutate(row, family, qualifier, CompareOperator.EQUAL, value, null, null,
         delete).isSuccess(),
@@ -837,8 +824,7 @@ public class HTable implements Table {
     final CompareOp compareOp, final byte[] value, final Delete delete) throws IOException {
     final Supplier supplier = new TableOperationSpanBuilder(connection)
       .setTableName(tableName)
-      .setOperation(HBaseSemanticAttributes.Operation.CHECK_AND_MUTATE)
-      .setContainerOperations(HBaseSemanticAttributes.Operation.CHECK_AND_MUTATE, HBaseSemanticAttributes.Operation.DELETE);
+      .setOperation(HBaseSemanticAttributes.Operation.CHECK_AND_MUTATE);
     return TraceUtil.trace(
       () -> doCheckAndMutate(row, family, qualifier, toCompareOperator(compareOp), value, null,
         null, delete).isSuccess(),
@@ -851,8 +837,7 @@ public class HTable implements Table {
     final CompareOperator op, final byte[] value, final Delete delete) throws IOException {
     final Supplier supplier = new TableOperationSpanBuilder(connection)
       .setTableName(tableName)
-      .setOperation(HBaseSemanticAttributes.Operation.CHECK_AND_MUTATE)
-      .setContainerOperations(HBaseSemanticAttributes.Operation.CHECK_AND_MUTATE, HBaseSemanticAttributes.Operation.DELETE);
+      .setOperation(HBaseSemanticAttributes.Operation.CHECK_AND_MUTATE);
     return TraceUtil.trace(
       () -> doCheckAndMutate(row, family, qualifier, op, value, null, null, delete).isSuccess(),
       supplier);
@@ -929,8 +914,7 @@ public class HTable implements Table {
     final CompareOp compareOp, final byte [] value, final RowMutations rm) throws IOException {
     final Supplier supplier = new TableOperationSpanBuilder(connection)
       .setTableName(tableName)
-      .setOperation(HBaseSemanticAttributes.Operation.CHECK_AND_MUTATE)
-      .setContainerOperations(rm);
+      .setOperation(HBaseSemanticAttributes.Operation.CHECK_AND_MUTATE);
     return TraceUtil.trace(
       () -> doCheckAndMutate(row, family, qualifier, toCompareOperator(compareOp), value, null,
         null, rm).isSuccess(),
@@ -943,8 +927,7 @@ public class HTable implements Table {
     final CompareOperator op, final byte [] value, final RowMutations rm) throws IOException {
     final Supplier supplier = new TableOperationSpanBuilder(connection)
       .setTableName(tableName)
-      .setOperation(HBaseSemanticAttributes.Operation.CHECK_AND_MUTATE)
-      .setContainerOperations(rm);
+      .setOperation(HBaseSemanticAttributes.Operation.CHECK_AND_MUTATE);
     return TraceUtil.trace(
       () -> doCheckAndMutate(row, family, qualifier, op, value, null, null, rm).isSuccess(),
       supplier);
@@ -954,8 +937,7 @@ public class HTable implements Table {
   public CheckAndMutateResult checkAndMutate(CheckAndMutate checkAndMutate) throws IOException {
     final Supplier supplier = new TableOperationSpanBuilder(connection)
       .setTableName(tableName)
-      .setOperation(checkAndMutate)
-      .setContainerOperations(checkAndMutate);
+      .setOperation(checkAndMutate);
     return TraceUtil.trace(() -> {
       Row action = checkAndMutate.getAction();
       if (action instanceof Put || action instanceof Delete || action instanceof Increment ||
@@ -1004,8 +986,7 @@ public class HTable implements Table {
     throws IOException {
     final Supplier supplier = new TableOperationSpanBuilder(connection)
       .setTableName(tableName)
-      .setOperation(HBaseSemanticAttributes.Operation.BATCH)
-      .setContainerOperations(checkAndMutates);
+      .setOperation(HBaseSemanticAttributes.Operation.BATCH);
     return TraceUtil.trace(() -> {
       if (checkAndMutates.isEmpty()) {
         return Collections.emptyList();
       }
@@ -1075,8 +1056,7 @@ public class HTable implements Table {
   public boolean[] exists(List gets) throws IOException {
     final Supplier supplier = new TableOperationSpanBuilder(connection)
       .setTableName(tableName)
-      .setOperation(HBaseSemanticAttributes.Operation.BATCH)
-      .setContainerOperations(gets);
+      .setOperation(HBaseSemanticAttributes.Operation.BATCH);
     return TraceUtil.trace(() -> {
       if (gets.isEmpty()) {
         return new boolean[] {};
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java
index 20e84cc6f73..c3cc1fb36f5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java
@@ -386,8 +386,7 @@ class RawAsyncTableImpl implements AsyncTable {
       validatePut(put, conn.connConf.getMaxKeyValueSize());
       preCheck();
       final Supplier supplier = newTableOperationSpanBuilder()
-        .setOperation(HBaseSemanticAttributes.Operation.CHECK_AND_MUTATE)
-        .setContainerOperations(put);
+        .setOperation(HBaseSemanticAttributes.Operation.CHECK_AND_MUTATE);
       return tracedFuture(
         () -> RawAsyncTableImpl.this. newCaller(row, put.getPriority(), rpcTimeoutNs)
           .action((controller, loc, stub) -> RawAsyncTableImpl.mutate(controller, loc, stub, put,
@@ -402,8 +401,7 @@ class RawAsyncTableImpl implements AsyncTable {
     public CompletableFuture thenDelete(Delete delete) {
       preCheck();
       final Supplier supplier = newTableOperationSpanBuilder()
-        .setOperation(HBaseSemanticAttributes.Operation.CHECK_AND_MUTATE)
-        .setContainerOperations(delete);
+        .setOperation(HBaseSemanticAttributes.Operation.CHECK_AND_MUTATE);
       return tracedFuture(
         () -> RawAsyncTableImpl.this. newCaller(row, delete.getPriority(), rpcTimeoutNs)
           .action((controller, loc, stub) -> RawAsyncTableImpl.mutate(controller, loc, stub, delete,
@@ -419,8 +417,7 @@ class RawAsyncTableImpl implements AsyncTable {
       preCheck();
       validatePutsInRowMutations(mutations, conn.connConf.getMaxKeyValueSize());
       final Supplier supplier = newTableOperationSpanBuilder()
-        .setOperation(HBaseSemanticAttributes.Operation.CHECK_AND_MUTATE)
-        .setContainerOperations(mutations);
+        .setOperation(HBaseSemanticAttributes.Operation.CHECK_AND_MUTATE);
       return tracedFuture(
         () -> RawAsyncTableImpl.this
           . newCaller(row, mutations.getMaxPriority(), rpcTimeoutNs)
@@ -463,8 +460,7 @@ class RawAsyncTableImpl implements AsyncTable {
     public CompletableFuture thenPut(Put put) {
       validatePut(put, conn.connConf.getMaxKeyValueSize());
       final Supplier supplier = newTableOperationSpanBuilder()
-        .setOperation(HBaseSemanticAttributes.Operation.CHECK_AND_MUTATE)
-        .setContainerOperations(put);
+        .setOperation(HBaseSemanticAttributes.Operation.CHECK_AND_MUTATE);
       return tracedFuture(
         () -> RawAsyncTableImpl.this. newCaller(row, put.getPriority(), rpcTimeoutNs)
           .action((controller, loc, stub) -> RawAsyncTableImpl.mutate(controller, loc,
@@ -479,8 +475,7 @@ class RawAsyncTableImpl implements AsyncTable {
     @Override
     public CompletableFuture thenDelete(Delete delete) {
       final Supplier supplier = newTableOperationSpanBuilder()
-        .setOperation(HBaseSemanticAttributes.Operation.CHECK_AND_MUTATE)
-        .setContainerOperations(delete);
+        .setOperation(HBaseSemanticAttributes.Operation.CHECK_AND_MUTATE);
       return tracedFuture(
         () -> RawAsyncTableImpl.this. newCaller(row, delete.getPriority(), rpcTimeoutNs)
           .action((controller, loc, stub) -> RawAsyncTableImpl.mutate(controller, loc, stub, delete,
@@ -495,8 +490,7 @@ class RawAsyncTableImpl implements AsyncTable {
     public CompletableFuture thenMutate(RowMutations mutations) {
       validatePutsInRowMutations(mutations, conn.connConf.getMaxKeyValueSize());
       final Supplier supplier = newTableOperationSpanBuilder()
-        .setOperation(HBaseSemanticAttributes.Operation.CHECK_AND_MUTATE)
-        .setContainerOperations(mutations);
+        .setOperation(HBaseSemanticAttributes.Operation.CHECK_AND_MUTATE);
       return tracedFuture(
         () -> RawAsyncTableImpl.this
           . newCaller(row, mutations.getMaxPriority(), rpcTimeoutNs)
@@ -518,8 +512,7 @@ class RawAsyncTableImpl implements AsyncTable {
   @Override
   public CompletableFuture checkAndMutate(CheckAndMutate checkAndMutate) {
     final Supplier supplier = newTableOperationSpanBuilder()
-      .setOperation(checkAndMutate)
-      .setContainerOperations(checkAndMutate.getAction());
+      .setOperation(checkAndMutate);
     return tracedFuture(() -> {
       if (checkAndMutate.getAction() instanceof Put ||
         checkAndMutate.getAction() instanceof Delete ||
@@ -572,8 +565,7 @@ class RawAsyncTableImpl implements AsyncTable {
   public List>
     checkAndMutate(List checkAndMutates) {
     final Supplier supplier = newTableOperationSpanBuilder()
-      .setOperation(checkAndMutates)
-      .setContainerOperations(checkAndMutates);
+      .setOperation(checkAndMutates);
     return tracedFutures(
       () -> batch(checkAndMutates, rpcTimeoutNs).stream()
         .map(f -> f.thenApply(r -> (CheckAndMutateResult) r)).collect(toList()),
@@ -629,8 +621,7 @@ class RawAsyncTableImpl implements AsyncTable {
     long nonceGroup = conn.getNonceGenerator().getNonceGroup();
     long nonce = conn.getNonceGenerator().newNonce();
     final Supplier supplier = newTableOperationSpanBuilder()
-      .setOperation(mutations)
-      .setContainerOperations(mutations);
+      .setOperation(mutations);
     return tracedFuture(
       () -> this
         . newCaller(mutations.getRow(), mutations.getMaxPriority(), writeRpcTimeoutNs)
@@ -703,32 +694,28 @@ class RawAsyncTableImpl implements AsyncTable {
   @Override
   public List> get(List gets) {
     final Supplier supplier = newTableOperationSpanBuilder()
-      .setOperation(gets)
-      .setContainerOperations(HBaseSemanticAttributes.Operation.GET);
+      .setOperation(gets);
     return tracedFutures(() -> batch(gets, readRpcTimeoutNs), supplier);
   }
 
   @Override
   public List> put(List puts) {
     final Supplier supplier = newTableOperationSpanBuilder()
-      .setOperation(puts)
-      .setContainerOperations(HBaseSemanticAttributes.Operation.PUT);
+      .setOperation(puts);
     return tracedFutures(() -> voidMutate(puts), supplier);
   }
 
   @Override
   public List> delete(List deletes) {
     final Supplier supplier = newTableOperationSpanBuilder()
-      .setOperation(deletes)
-      .setContainerOperations(HBaseSemanticAttributes.Operation.DELETE);
+      .setOperation(deletes);
     return tracedFutures(() -> voidMutate(deletes), supplier);
  }
 
   @Override
   public List> batch(List actions) {
     final Supplier supplier = newTableOperationSpanBuilder()
-      .setOperation(actions)
-      .setContainerOperations(actions);
+      .setOperation(actions);
     return tracedFutures(() -> batch(actions, rpcTimeoutNs), supplier);
   }
 
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/trace/TableOperationSpanBuilder.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/trace/TableOperationSpanBuilder.java
index 7db4edb1a82..2b9314a7ee8 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/trace/TableOperationSpanBuilder.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/trace/TableOperationSpanBuilder.java
@@ -18,22 +18,15 @@
 package org.apache.hadoop.hbase.client.trace;
 
-import static org.apache.hadoop.hbase.trace.HBaseSemanticAttributes.CONTAINER_DB_OPERATIONS_KEY;
 import static org.apache.hadoop.hbase.trace.HBaseSemanticAttributes.DB_OPERATION;
 
 import io.opentelemetry.api.common.AttributeKey;
 import io.opentelemetry.api.trace.Span;
 import io.opentelemetry.api.trace.SpanBuilder;
 import io.opentelemetry.api.trace.SpanKind;
-import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
 import java.util.Map;
-import java.util.Set;
 import java.util.function.Supplier;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.AsyncConnectionImpl;
@@ -97,72 +90,6 @@ public class TableOperationSpanBuilder implements Supplier {
     return this;
   }
 
-  // `setContainerOperations` perform a recursive descent expansion of all the operations
-  // contained within the provided "batch" object.
-
-  public TableOperationSpanBuilder setContainerOperations(final RowMutations mutations) {
-    final Operation[] ops = mutations.getMutations()
-      .stream()
-      .flatMap(row -> Stream.concat(Stream.of(valueFrom(row)), unpackRowOperations(row).stream()))
-      .toArray(Operation[]::new);
-    return setContainerOperations(ops);
-  }
-
-  public TableOperationSpanBuilder setContainerOperations(final Row row) {
-    final Operation[] ops =
-      Stream.concat(Stream.of(valueFrom(row)), unpackRowOperations(row).stream())
-        .toArray(Operation[]::new);
-    return setContainerOperations(ops);
-  }
-
-  public TableOperationSpanBuilder setContainerOperations(
-    final Collection operations
-  ) {
-    final Operation[] ops = operations.stream()
-      .flatMap(row -> Stream.concat(Stream.of(valueFrom(row)), unpackRowOperations(row).stream()))
-      .toArray(Operation[]::new);
-    return setContainerOperations(ops);
-  }
-
-  private static Set unpackRowOperations(final Row row) {
-    final Set ops = new HashSet<>();
-    if (row instanceof CheckAndMutate) {
-      final CheckAndMutate cam = (CheckAndMutate) row;
-      ops.addAll(unpackRowOperations(cam));
-    }
-    if (row instanceof RowMutations) {
-      final RowMutations mutations = (RowMutations) row;
-      ops.addAll(unpackRowOperations(mutations));
-    }
-    return ops;
-  }
-
-  private static Set unpackRowOperations(final CheckAndMutate cam) {
-    final Set ops = new HashSet<>();
-    final Operation op = valueFrom(cam.getAction());
-    switch (op) {
-      case BATCH:
-      case CHECK_AND_MUTATE:
-        ops.addAll(unpackRowOperations(cam.getAction()));
-        break;
-      default:
-        ops.add(op);
-    }
-    return ops;
-  }
-
-  public TableOperationSpanBuilder setContainerOperations(
-    final Operation... operations
-  ) {
-    final List ops = Arrays.stream(operations)
-      .map(op -> op == null ? unknown : op.name())
-      .sorted()
-      .distinct()
-      .collect(Collectors.toList());
-    attributes.put(CONTAINER_DB_OPERATIONS_KEY, ops);
-    return this;
-  }
-
   public TableOperationSpanBuilder setTableName(final TableName tableName) {
     this.tableName = tableName;
     TableSpanBuilder.populateTableNameAttributes(attributes, tableName);
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableTracing.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableTracing.java
index a2e4f5ca85d..05a8ec3e21d 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableTracing.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableTracing.java
@@ -17,8 +17,6 @@
  */
 package org.apache.hadoop.hbase.client;
 
-import static org.apache.hadoop.hbase.client.trace.hamcrest.AttributesMatchers.containsEntryWithStringValuesOf;
-import static org.apache.hadoop.hbase.client.trace.hamcrest.SpanDataMatchers.hasAttributes;
 import static org.apache.hadoop.hbase.client.trace.hamcrest.SpanDataMatchers.hasEnded;
 import static org.apache.hadoop.hbase.client.trace.hamcrest.SpanDataMatchers.hasKind;
 import static org.apache.hadoop.hbase.client.trace.hamcrest.SpanDataMatchers.hasName;
@@ -337,9 +335,7 @@ public class TestAsyncTableTracing {
         .ifEquals(Bytes.toBytes("cf"), Bytes.toBytes("cq"), Bytes.toBytes("v"))
         .build(new Delete(Bytes.toBytes(0))))).toArray(new CompletableFuture[0]))
       .join();
-    assertTrace("BATCH", hasAttributes(
-      containsEntryWithStringValuesOf(
-        "db.hbase.container_operations", "CHECK_AND_MUTATE", "DELETE")));
+    assertTrace("BATCH");
   }
 
   @Test
@@ -347,9 +343,7 @@ public class TestAsyncTableTracing {
     table.checkAndMutateAll(Arrays.asList(CheckAndMutate.newBuilder(Bytes.toBytes(0))
       .ifEquals(Bytes.toBytes("cf"), Bytes.toBytes("cq"), Bytes.toBytes("v"))
       .build(new Delete(Bytes.toBytes(0))))).join();
-    assertTrace("BATCH", hasAttributes(
-      containsEntryWithStringValuesOf(
-        "db.hbase.container_operations", "CHECK_AND_MUTATE", "DELETE")));
+    assertTrace("BATCH");
   }
 
   private void testCheckAndMutateBuilder(Row op) {
@@ -434,14 +428,12 @@ public class TestAsyncTableTracing {
   }
 
   @Test
-  public void testMutateRow() throws IOException {
-    final RowMutations mutations = new RowMutations(Bytes.toBytes(0))
-      .add((Mutation) new Put(Bytes.toBytes(0))
-        .addColumn(Bytes.toBytes("cf"), Bytes.toBytes("cq"), Bytes.toBytes("v")))
-      .add((Mutation) new Delete(Bytes.toBytes(0)));
-    table.mutateRow(mutations).join();
-    assertTrace("BATCH", hasAttributes(
-      containsEntryWithStringValuesOf("db.hbase.container_operations", "DELETE", "PUT")));
+  public void testMutateRow() throws Exception {
+    byte[] row = Bytes.toBytes(0);
+    RowMutations mutation = new RowMutations(row);
+    mutation.add(new Delete(row));
+    table.mutateRow(mutation).get();
+    assertTrace("BATCH");
   }
 
   @Test
@@ -456,15 +448,13 @@ public class TestAsyncTableTracing {
       .allOf(
         table.exists(Arrays.asList(new Get(Bytes.toBytes(0)))).toArray(new CompletableFuture[0]))
       .join();
-    assertTrace("BATCH", hasAttributes(
-      containsEntryWithStringValuesOf("db.hbase.container_operations", "GET")));
+    assertTrace("BATCH");
   }
 
   @Test
   public void testExistsAll() {
     table.existsAll(Arrays.asList(new Get(Bytes.toBytes(0)))).join();
-    assertTrace("BATCH", hasAttributes(
-      containsEntryWithStringValuesOf("db.hbase.container_operations", "GET")));
+    assertTrace("BATCH");
   }
 
   @Test
@@ -472,15 +462,13 @@ public class TestAsyncTableTracing {
     CompletableFuture
      .allOf(table.get(Arrays.asList(new Get(Bytes.toBytes(0)))).toArray(new CompletableFuture[0]))
       .join();
-    assertTrace("BATCH", hasAttributes(
-      containsEntryWithStringValuesOf("db.hbase.container_operations", "GET")));
+    assertTrace("BATCH");
   }
 
   @Test
   public void testGetAll() {
     table.getAll(Arrays.asList(new Get(Bytes.toBytes(0)))).join();
-    assertTrace("BATCH", hasAttributes(
-      containsEntryWithStringValuesOf("db.hbase.container_operations", "GET")));
+    assertTrace("BATCH");
   }
 
   @Test
@@ -489,16 +477,14 @@ public class TestAsyncTableTracing {
      .allOf(table.put(Arrays.asList(new Put(Bytes.toBytes(0)).addColumn(Bytes.toBytes("cf"),
        Bytes.toBytes("cq"), Bytes.toBytes("v")))).toArray(new CompletableFuture[0]))
       .join();
-    assertTrace("BATCH", hasAttributes(
-      containsEntryWithStringValuesOf("db.hbase.container_operations", "PUT")));
+    assertTrace("BATCH");
   }
 
   @Test
   public void testPutAll() {
     table.putAll(Arrays.asList(new Put(Bytes.toBytes(0)).addColumn(Bytes.toBytes("cf"),
       Bytes.toBytes("cq"), Bytes.toBytes("v")))).join();
-    assertTrace("BATCH", hasAttributes(
-      containsEntryWithStringValuesOf("db.hbase.container_operations", "PUT")));
+    assertTrace("BATCH");
   }
 
   @Test
@@ -507,15 +493,13 @@ public class TestAsyncTableTracing {
       .allOf(
         table.delete(Arrays.asList(new Delete(Bytes.toBytes(0)))).toArray(new CompletableFuture[0]))
       .join();
-    assertTrace("BATCH", hasAttributes(
-      containsEntryWithStringValuesOf("db.hbase.container_operations", "DELETE")));
+    assertTrace("BATCH");
   }
 
   @Test
   public void testDeleteAll() {
     table.deleteAll(Arrays.asList(new Delete(Bytes.toBytes(0)))).join();
-    assertTrace("BATCH", hasAttributes(
-      containsEntryWithStringValuesOf("db.hbase.container_operations", "DELETE")));
+    assertTrace("BATCH");
   }
 
   @Test
@@ -524,14 +508,12 @@ public class TestAsyncTableTracing {
       .allOf(
         table.batch(Arrays.asList(new Delete(Bytes.toBytes(0)))).toArray(new CompletableFuture[0]))
       .join();
-    assertTrace("BATCH", hasAttributes(
-      containsEntryWithStringValuesOf("db.hbase.container_operations", "DELETE")));
+    assertTrace("BATCH");
   }
 
   @Test
   public void testBatchAll() {
     table.batchAll(Arrays.asList(new Delete(Bytes.toBytes(0)))).join();
-    assertTrace("BATCH", hasAttributes(
-      containsEntryWithStringValuesOf("db.hbase.container_operations", "DELETE")));
+    assertTrace("BATCH");
   }
 }
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestHTableTracing.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestHTableTracing.java
index 4b94ad9f36d..80db9a122e3 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestHTableTracing.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestHTableTracing.java
@@ -17,8 +17,6 @@
  */
 package org.apache.hadoop.hbase.client;
 
-import static org.apache.hadoop.hbase.client.trace.hamcrest.AttributesMatchers.containsEntryWithStringValuesOf;
-import static org.apache.hadoop.hbase.client.trace.hamcrest.SpanDataMatchers.hasAttributes;
 import static org.apache.hadoop.hbase.client.trace.hamcrest.SpanDataMatchers.hasEnded;
 import static org.apache.hadoop.hbase.client.trace.hamcrest.SpanDataMatchers.hasKind;
 import static org.apache.hadoop.hbase.client.trace.hamcrest.SpanDataMatchers.hasName;
@@ -322,9 +320,7 @@ public class TestHTableTracing extends TestTracingBase {
     table.checkAndMutate(CheckAndMutate.newBuilder(Bytes.toBytes(0))
       .ifEquals(Bytes.toBytes("cf"), Bytes.toBytes("cq"), Bytes.toBytes("v"))
       .build(new Delete(Bytes.toBytes(0))));
-    assertTrace("CHECK_AND_MUTATE", hasAttributes(
-      containsEntryWithStringValuesOf(
-        "db.hbase.container_operations", "CHECK_AND_MUTATE", "DELETE")));
+    assertTrace("CHECK_AND_MUTATE");
   }
 
   @Test
@@ -332,9 +328,7 @@ public class TestHTableTracing extends TestTracingBase {
     table.checkAndMutate(Arrays.asList(CheckAndMutate.newBuilder(Bytes.toBytes(0))
       .ifEquals(Bytes.toBytes("cf"), Bytes.toBytes("cq"), Bytes.toBytes("v"))
       .build(new Delete(Bytes.toBytes(0)))));
-    assertTrace("BATCH", hasAttributes(
-      containsEntryWithStringValuesOf(
-        "db.hbase.container_operations", "CHECK_AND_MUTATE", "DELETE")));
+    assertTrace("BATCH");
   }
 
   @Test
@@ -342,67 +336,51 @@ public class TestHTableTracing extends TestTracingBase {
     table.checkAndMutate(Arrays.asList(CheckAndMutate.newBuilder(Bytes.toBytes(0))
       .ifEquals(Bytes.toBytes("cf"), Bytes.toBytes("cq"), Bytes.toBytes("v"))
       .build(new Delete(Bytes.toBytes(0)))));
-    assertTrace("BATCH", hasAttributes(
-      containsEntryWithStringValuesOf(
-        "db.hbase.container_operations", "CHECK_AND_MUTATE", "DELETE")));
+    assertTrace("BATCH");
   }
 
   @Test
   public void testMutateRow() throws Exception {
     byte[] row = Bytes.toBytes(0);
     table.mutateRow(RowMutations.of(Arrays.asList(new Delete(row))));
-    assertTrace("BATCH", hasAttributes(
-      containsEntryWithStringValuesOf(
-        "db.hbase.container_operations", "DELETE")));
+    assertTrace("BATCH");
   }
 
   @Test
   public void testExistsList() throws IOException {
     table.exists(Arrays.asList(new Get(Bytes.toBytes(0))));
-    assertTrace("BATCH", hasAttributes(
-      containsEntryWithStringValuesOf(
-        "db.hbase.container_operations", "GET")));
+    assertTrace("BATCH");
   }
 
   @Test
   public void testExistsAll() throws IOException {
     table.existsAll(Arrays.asList(new Get(Bytes.toBytes(0))));
-    assertTrace("BATCH", hasAttributes(
-      containsEntryWithStringValuesOf(
-        "db.hbase.container_operations", "GET")));
+    assertTrace("BATCH");
  }
 
   @Test
   public void testGetList() throws IOException {
     table.get(Arrays.asList(new Get(Bytes.toBytes(0))));
-    assertTrace("BATCH", hasAttributes(
-      containsEntryWithStringValuesOf(
-        "db.hbase.container_operations", "GET")));
+    assertTrace("BATCH");
   }
 
   @Test
   public void testPutList() throws IOException {
     table.put(Arrays.asList(new Put(Bytes.toBytes(0)).addColumn(Bytes.toBytes("cf"),
       Bytes.toBytes("cq"), Bytes.toBytes("v"))));
-    assertTrace("BATCH", hasAttributes(
-      containsEntryWithStringValuesOf(
-        "db.hbase.container_operations", "PUT")));
+    assertTrace("BATCH");
   }
 
   @Test
   public void testDeleteList() throws IOException {
     table.delete(Lists.newArrayList(new Delete(Bytes.toBytes(0))));
-    assertTrace("BATCH", hasAttributes(
-      containsEntryWithStringValuesOf(
-        "db.hbase.container_operations", "DELETE")));
+    assertTrace("BATCH");
   }
 
   @Test
   public void testBatchList() throws IOException, InterruptedException {
     table.batch(Arrays.asList(new Delete(Bytes.toBytes(0))), null);
-    assertTrace("BATCH", hasAttributes(
-      containsEntryWithStringValuesOf(
-        "db.hbase.container_operations", "DELETE")));
+    assertTrace("BATCH");
   }
 
   @Test
@@ -410,4 +388,5 @@ public class TestHTableTracing extends TestTracingBase {
     table.close();
     assertTrace(HTable.class.getSimpleName(), "close", null, TableName.META_TABLE_NAME);
   }
+
 }
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestTracingBase.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestTracingBase.java
index 2a10d3b9e8c..34633481c21 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestTracingBase.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestTracingBase.java
@@ -63,7 +63,6 @@ public class TestTracingBase {
     conf = HBaseConfiguration.create();
     conf.set(HConstants.CLIENT_CONNECTION_REGISTRY_IMPL_CONF_KEY,
       RegistryForTracingTest.class.getName());
-    TRACE_RULE.clearSpans();
   }
 
   protected void assertTrace(String className, String methodName, ServerName serverName,
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/HBaseSemanticAttributes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/HBaseSemanticAttributes.java
index 1a74fdcd65a..fd6ab852e06 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/HBaseSemanticAttributes.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/trace/HBaseSemanticAttributes.java
@@ -36,12 +36,6 @@ public final class HBaseSemanticAttributes {
   public static final AttributeKey DB_NAME = SemanticAttributes.DB_NAME;
   public static final AttributeKey DB_OPERATION = SemanticAttributes.DB_OPERATION;
   public static final AttributeKey TABLE_KEY = AttributeKey.stringKey("db.hbase.table");
-  /**
-   * For operations that themselves ship one or more operations, such as
-   * {@link Operation#BATCH} and {@link Operation#CHECK_AND_MUTATE}.
-   */
-  public static final AttributeKey> CONTAINER_DB_OPERATIONS_KEY =
-    AttributeKey.stringArrayKey("db.hbase.container_operations");
   public static final AttributeKey> REGION_NAMES_KEY =
     AttributeKey.stringArrayKey("db.hbase.regions");
   public static final AttributeKey RPC_SERVICE_KEY =
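
Reviewer note, not part of the patch: a minimal, hypothetical OpenTelemetry sketch of the db.hbase.container_operations string-array attribute that the removed CONTAINER_DB_OPERATIONS_KEY and setContainerOperations(...) plumbing used to place on BATCH and CHECK_AND_MUTATE spans. The class name and tracer name are invented for illustration; the attribute key and the example values ("CHECK_AND_MUTATE", "DELETE") come from the removed code and test assertions, and only the public OpenTelemetry API is used.

import io.opentelemetry.api.GlobalOpenTelemetry;
import io.opentelemetry.api.common.AttributeKey;
import io.opentelemetry.api.trace.Span;
import io.opentelemetry.api.trace.Tracer;
import java.util.Arrays;
import java.util.List;

public class ContainerOperationsAttributeSketch {
  // Mirrors the AttributeKey that this patch removes from HBaseSemanticAttributes.
  private static final AttributeKey<List<String>> CONTAINER_DB_OPERATIONS_KEY =
    AttributeKey.stringArrayKey("db.hbase.container_operations");

  public static void main(String[] args) {
    // Tracer name is arbitrary for this illustration.
    Tracer tracer = GlobalOpenTelemetry.getTracer("container-operations-sketch");
    Span span = tracer.spanBuilder("BATCH").startSpan();
    try {
      // Before this patch, the deleted setContainerOperations(...) methods computed a value
      // like this by unpacking the batched Row objects; after the patch only the top-level
      // db.operation attribute remains on the span.
      span.setAttribute(CONTAINER_DB_OPERATIONS_KEY, Arrays.asList("CHECK_AND_MUTATE", "DELETE"));
    } finally {
      span.end();
    }
  }
}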