HBASE-15198 RPC client not using Codec and CellBlock for puts by default.
commit 29a192ef3c
parent fec9733893
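Background for the change: the client-side RPC codec is controlled by HConstants.RPC_CODEC_CONF_KEY ("hbase.client.rpc.codec") and already defaults to the KeyValueCodec, but MultiServerCallable decided whether to ship puts as cell blocks by reading the raw configuration key instead of asking the RPC client, so multi-puts silently fell back to protobuf-embedded cells. The sketch below is not part of the commit; it is a minimal, assumption-laden illustration of how the fix surfaces to a client (the table, family, qualifier, and class names are made up, and the explicit conf.set is only needed to override the default codec):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ClusterConnection;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class CellBlockPutExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Optional: the client already defaults to KeyValueCodec, so this set() only
    // overrides the default; setting an empty string disables cell blocks.
    conf.set(HConstants.RPC_CODEC_CONF_KEY, "org.apache.hadoop.hbase.codec.KeyValueCodec");

    try (Connection conn = ConnectionFactory.createConnection(conf);
         Table table = conn.getTable(TableName.valueOf("example_table"))) {
      // With a codec configured, this put travels in a cell block rather than
      // being serialized inside the protobuf mutation.
      table.put(new Put(Bytes.toBytes("row1"))
          .addColumn(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v")));

      // hasCellBlockSupport() is the method added by this commit; ClusterConnection
      // is an internal interface, so the cast is only for illustration (the new
      // test in TestFromClientSide3 does the same thing).
      if (conn instanceof ClusterConnection) {
        System.out.println("cell block support: "
            + ((ClusterConnection) conn).hasCellBlockSupport());
      }
    }
  }
}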
@@ -303,4 +303,9 @@ public interface ClusterConnection extends HConnection {
    */
   public MetricsConnection getConnectionMetrics();
 
+  /**
+   * @return true when this connection uses a {@link org.apache.hadoop.hbase.codec.Codec} and so
+   *   supports cell blocks.
+   */
+  boolean hasCellBlockSupport();
 }
@@ -2255,4 +2255,9 @@ class ConnectionImplementation implements ClusterConnection, Closeable {
     return RpcRetryingCallerFactory
         .instantiate(conf, this.interceptor, this.getStatisticsTracker());
   }
+
+  @Override
+  public boolean hasCellBlockSupport() {
+    return this.rpcClient.hasCellBlockSupport();
+  }
 }
@@ -22,7 +22,6 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.CellScannable;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
@@ -152,11 +151,8 @@ class MultiServerCallable<R> extends RegionServerCallable<MultiResponse> impleme
     // This is not exact -- the configuration could have changed on us after connection was set up
     // but it will do for now.
     HConnection connection = getConnection();
-    if (connection == null) return true; // Default is to do cellblocks.
-    Configuration configuration = connection.getConfiguration();
-    if (configuration == null) return true;
-    String codec = configuration.get(HConstants.RPC_CODEC_CONF_KEY, "");
-    return codec != null && codec.length() > 0;
+    if (!(connection instanceof ClusterConnection)) return true; // Default is to do cellblocks.
+    return ((ClusterConnection) connection).hasCellBlockSupport();
   }
 
   @Override
@@ -149,6 +149,11 @@ public abstract class AbstractRpcClient implements RpcClient {
     }
   }
 
+  @Override
+  public boolean hasCellBlockSupport() {
+    return this.codec != null;
+  }
+
   /**
    * Encapsulate the ugly casting and RuntimeException conversion in private method.
    * @param conf configuration
@@ -83,4 +83,10 @@ import java.io.IOException;
    * using this client.
    */
   @Override public void close();
+
+  /**
+   * @return true when this client uses a {@link org.apache.hadoop.hbase.codec.Codec} and so
+   *   supports cell blocks.
+   */
+  boolean hasCellBlockSupport();
 }
@@ -1188,10 +1188,6 @@ public final class ProtobufUtil {
           valueBuilder.setValue(ByteStringer.wrap(
             cell.getValueArray(), cell.getValueOffset(), cell.getValueLength()));
           valueBuilder.setTimestamp(cell.getTimestamp());
-          if(cell.getTagsLength() > 0) {
-            valueBuilder.setTags(ByteStringer.wrap(cell.getTagsArray(), cell.getTagsOffset(),
-              cell.getTagsLength()));
-          }
           if (type == MutationType.DELETE || (type == MutationType.PUT && CellUtil.isDelete(cell))) {
             KeyValue.Type keyValueType = KeyValue.Type.codeToType(cell.getTypeByte());
             valueBuilder.setDeleteType(toDeleteType(keyValueType));
@@ -648,8 +648,16 @@ public final class RequestConverter {
         cells.add(i);
         builder.addAction(actionBuilder.setMutation(ProtobufUtil.toMutationNoData(
           MutationType.INCREMENT, i, mutationBuilder, action.getNonce())));
+      } else if (row instanceof RegionCoprocessorServiceExec) {
+        RegionCoprocessorServiceExec exec = (RegionCoprocessorServiceExec) row;
+        builder.addAction(actionBuilder.setServiceCall(
+          ClientProtos.CoprocessorServiceCall.newBuilder()
+            .setRow(ByteStringer.wrap(exec.getRow()))
+            .setServiceName(exec.getMethod().getService().getFullName())
+            .setMethodName(exec.getMethod().getName())
+            .setRequest(exec.getRequest().toByteString())));
       } else if (row instanceof RowMutations) {
-        continue; // ignore RowMutations
+        throw new UnsupportedOperationException("No RowMutations in multi calls; use mutateRow");
       } else {
         throw new DoNotRetryIOException("Multi doesn't support " + row.getClass().getName());
       }
@@ -475,4 +475,10 @@ public class TestFromClientSide3 {
     assertTrue(Arrays.equals(res.getValue(FAMILY, COL_QUAL), VAL_BYTES));
     table.close();
   }
+
+  @Test
+  public void testConnectionDefaultUsesCodec() throws Exception {
+    ClusterConnection con = (ClusterConnection) TEST_UTIL.getConnection();
+    assertTrue(con.hasCellBlockSupport());
+  }
 }
@@ -2508,30 +2508,6 @@ public class TestAccessController extends SecureTestUtil {
     }
   }
 
-  @Test (timeout=180000)
-  public void testReservedCellTags() throws Exception {
-    AccessTestAction putWithReservedTag = new AccessTestAction() {
-      @Override
-      public Object run() throws Exception {
-        try(Connection conn = ConnectionFactory.createConnection(conf);
-            Table t = conn.getTable(TEST_TABLE);) {
-          KeyValue kv = new KeyValue(TEST_ROW, TEST_FAMILY, TEST_QUALIFIER,
-            HConstants.LATEST_TIMESTAMP, HConstants.EMPTY_BYTE_ARRAY,
-            new Tag[] { new ArrayBackedTag(AccessControlLists.ACL_TAG_TYPE,
-              ProtobufUtil.toUsersAndPermissions(USER_OWNER.getShortName(),
-                new Permission(Permission.Action.READ)).toByteArray()) });
-          t.put(new Put(TEST_ROW).add(kv));
-        }
-        return null;
-      }
-    };
-
-    // Current user is superuser
-    verifyAllowed(putWithReservedTag, User.getCurrent());
-    // No other user should be allowed
-    verifyDenied(putWithReservedTag, USER_OWNER, USER_ADMIN, USER_CREATE, USER_RW, USER_RO);
-  }
-
   @Test (timeout=180000)
   public void testSetQuota() throws Exception {
     AccessTestAction setUserQuotaAction = new AccessTestAction() {