diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterConnection.java
index c813bae6665..f23eb82eacc 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterConnection.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterConnection.java
@@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.client;
 import java.io.IOException;
 import java.util.List;
 
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.MasterNotRunningException;
@@ -306,4 +305,9 @@
    */
   public MetricsConnection getConnectionMetrics();
 
+  /**
+   * @return true when this connection uses a {@link org.apache.hadoop.hbase.codec.Codec} and so
+   *         supports cell blocks.
+   */
+  boolean hasCellBlockSupport();
 }
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionAdapter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionAdapter.java
index b42593e9d8b..7a18ea536ba 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionAdapter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionAdapter.java
@@ -469,4 +469,9 @@ abstract class ConnectionAdapter implements ClusterConnection {
   public ClientBackoffPolicy getBackoffPolicy() {
     return wrappedConnection.getBackoffPolicy();
   }
+
+  @Override
+  public boolean hasCellBlockSupport() {
+    return wrappedConnection.hasCellBlockSupport();
+  }
 }
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionManager.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionManager.java
index 7a298e20fc6..5aa604da9a1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionManager.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionManager.java
@@ -57,8 +57,6 @@ import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MasterNotRunningException;
 import org.apache.hadoop.hbase.MetaTableAccessor;
 import org.apache.hadoop.hbase.RegionLocations;
-import org.apache.hadoop.hbase.RegionTooBusyException;
-import org.apache.hadoop.hbase.RetryImmediatelyException;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.TableNotEnabledException;
@@ -73,7 +71,6 @@ import org.apache.hadoop.hbase.client.backoff.ClientBackoffPolicyFactory;
 import org.apache.hadoop.hbase.client.coprocessor.Batch;
 import org.apache.hadoop.hbase.exceptions.ClientExceptionsUtil;
 import org.apache.hadoop.hbase.exceptions.RegionMovedException;
-import org.apache.hadoop.hbase.exceptions.RegionOpeningException;
 import org.apache.hadoop.hbase.ipc.RpcClient;
 import org.apache.hadoop.hbase.ipc.RpcClientFactory;
 import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
@@ -184,7 +181,6 @@ import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequ
 import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse;
 import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest;
 import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionResponse;
-import org.apache.hadoop.hbase.quotas.ThrottlingException;
 import org.apache.hadoop.hbase.regionserver.RegionServerStoppedException;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.security.UserProvider;
@@ -2620,6 +2616,11 @@
     public boolean isManaged() {
       return managed;
     }
+
+    @Override
+    public boolean hasCellBlockSupport() {
+      return this.rpcClient.hasCellBlockSupport();
+    }
   }
 
   /**
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java
index 9dbebb45f17..f02d14df332 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java
@@ -22,7 +22,6 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.CellScannable;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
@@ -152,11 +151,8 @@ class MultiServerCallable<R> extends RegionServerCallable<MultiResponse> impleme
     // This is not exact -- the configuration could have changed on us after connection was set up
     // but it will do for now.
     HConnection connection = getConnection();
-    if (connection == null) return true; // Default is to do cellblocks.
-    Configuration configuration = connection.getConfiguration();
-    if (configuration == null) return true;
-    String codec = configuration.get(HConstants.RPC_CODEC_CONF_KEY, "");
-    return codec != null && codec.length() > 0;
+    if (!(connection instanceof ClusterConnection)) return true; // Default is to do cellblocks.
+    return ((ClusterConnection) connection).hasCellBlockSupport();
   }
 
   @Override
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
index e33ef3a1479..a53fb707082 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
@@ -149,6 +149,11 @@
     }
   }
 
+  @Override
+  public boolean hasCellBlockSupport() {
+    return this.codec != null;
+  }
+
   /**
    * Encapsulate the ugly casting and RuntimeException conversion in private method.
    * @param conf configuration
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java
index cf689f5860c..540e224bdfe 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java
@@ -83,4 +83,10 @@ import java.io.IOException;
    * using this client.
    */
   @Override public void close();
+
+  /**
+   * @return true when this client uses a {@link org.apache.hadoop.hbase.codec.Codec} and so
+   *         supports cell blocks.
+   */
+  boolean hasCellBlockSupport();
 }
\ No newline at end of file
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index e19f2f0939a..6fa17c95042 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -73,7 +73,6 @@ import org.apache.hadoop.hbase.filter.ByteArrayComparable;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.io.LimitInputStream;
 import org.apache.hadoop.hbase.io.TimeRange;
-import org.apache.hadoop.hbase.protobuf.generated.RPCProtos;
 import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos;
 import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.AccessControlService;
 import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService;
@@ -1206,10 +1205,6 @@
     valueBuilder.setValue(ByteStringer.wrap(
       cell.getValueArray(), cell.getValueOffset(), cell.getValueLength()));
     valueBuilder.setTimestamp(cell.getTimestamp());
-    if(cell.getTagsLength() > 0) {
-      valueBuilder.setTags(ByteStringer.wrap(cell.getTagsArray(), cell.getTagsOffset(),
-        cell.getTagsLength()));
-    }
     if (type == MutationType.DELETE || (type == MutationType.PUT && CellUtil.isDelete(cell))) {
       KeyValue.Type keyValueType = KeyValue.Type.codeToType(cell.getTypeByte());
       valueBuilder.setDeleteType(toDeleteType(keyValueType));
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java
index 96f5d9ec0a8..be2bab25baa 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/RequestConverter.java
@@ -21,8 +21,6 @@ import java.io.IOException;
 import java.util.List;
 import java.util.regex.Pattern;
 
-import org.apache.hadoop.hbase.util.ByteStringer;
-
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.CellScannable;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
@@ -680,8 +678,15 @@
         cells.add(i);
         builder.addAction(actionBuilder.setMutation(ProtobufUtil.toMutationNoData(
           MutationType.INCREMENT, i, mutationBuilder, action.getNonce())));
+      } else if (row instanceof RegionCoprocessorServiceExec) {
+        RegionCoprocessorServiceExec exec = (RegionCoprocessorServiceExec) row;
+        builder.addAction(actionBuilder.setServiceCall(ClientProtos.CoprocessorServiceCall
+          .newBuilder().setRow(ByteStringer.wrap(exec.getRow()))
+          .setServiceName(exec.getMethod().getService().getFullName())
+          .setMethodName(exec.getMethod().getName())
+          .setRequest(exec.getRequest().toByteString())));
       } else if (row instanceof RowMutations) {
-        continue; // ignore RowMutations
+        throw new UnsupportedOperationException("No RowMutations in multi calls; use mutateRow");
       } else {
         throw new DoNotRetryIOException("Multi doesn't support " + row.getClass().getName());
       }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
index fcdd334d9a1..75e69b67b2e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
@@ -2505,30 +2505,6 @@ public class TestAccessController extends SecureTestUtil {
     }
   }
 
-  @Test (timeout=180000)
-  public void testReservedCellTags() throws Exception {
-    AccessTestAction putWithReservedTag = new AccessTestAction() {
-      @Override
-      public Object run() throws Exception {
-        try(Connection conn = ConnectionFactory.createConnection(conf);
-            Table t = conn.getTable(TEST_TABLE);) {
-          KeyValue kv = new KeyValue(TEST_ROW, TEST_FAMILY, TEST_QUALIFIER,
-            HConstants.LATEST_TIMESTAMP, HConstants.EMPTY_BYTE_ARRAY,
-            new Tag[] { new Tag(AccessControlLists.ACL_TAG_TYPE,
-              ProtobufUtil.toUsersAndPermissions(USER_OWNER.getShortName(),
-                new Permission(Permission.Action.READ)).toByteArray()) });
-          t.put(new Put(TEST_ROW).add(kv));
-        }
-        return null;
-      }
-    };
-
-    // Current user is superuser
-    verifyAllowed(putWithReservedTag, User.getCurrent());
-    // No other user should be allowed
-    verifyDenied(putWithReservedTag, USER_OWNER, USER_ADMIN, USER_CREATE, USER_RW, USER_RO);
-  }
-
   @Test (timeout=180000)
   public void testSetQuota() throws Exception {
     AccessTestAction setUserQuotaAction = new AccessTestAction() {
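
Not part of the patch above: a minimal sketch of the call pattern the MultiServerCallable hunk switches to, asking the connection whether its RPC client carries a Codec instead of re-reading hbase.rpc.codec from the Configuration. The helper class and method name below are illustrative assumptions, not HBase API; only ClusterConnection.hasCellBlockSupport() comes from this patch.

// Illustrative helper, not part of the patch: mirrors the new MultiServerCallable logic.
import org.apache.hadoop.hbase.client.ClusterConnection;
import org.apache.hadoop.hbase.client.HConnection;

final class CellBlockCheck {
  private CellBlockCheck() {
  }

  /** Decide whether Cells should ride in cell blocks for RPCs on this connection. */
  static boolean useCellBlocks(HConnection connection) {
    // Default to cell blocks when the connection cannot be asked directly.
    if (!(connection instanceof ClusterConnection)) {
      return true;
    }
    // Delegates to the RPC client; AbstractRpcClient answers true only when a Codec is configured.
    return ((ClusterConnection) connection).hasCellBlockSupport();
  }
}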
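
Also not part of the patch: the RequestConverter hunk replaces the silent skip of RowMutations in multi calls with an UnsupportedOperationException that points callers at mutateRow. A hedged sketch of that suggested path using the public branch-1 client API; the table name, column family, qualifiers, values, and class name below are made up for illustration.

// Illustrative only: apply an atomic per-row mutation via Table.mutateRow instead of batch()/multi.
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class MutateRowExample {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    byte[] row = Bytes.toBytes("row1");
    try (Connection conn = ConnectionFactory.createConnection(conf);
         Table table = conn.getTable(TableName.valueOf("example_table"))) {
      RowMutations rm = new RowMutations(row);
      rm.add(new Put(row).addColumn(Bytes.toBytes("f"), Bytes.toBytes("q1"), Bytes.toBytes("v1")));
      rm.add(new Delete(row).addColumns(Bytes.toBytes("f"), Bytes.toBytes("q2")));
      // Applied atomically to the single row; routed through mutateRow, not the multi path.
      table.mutateRow(rm);
    }
  }
}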