HBASE-14997 Move compareOp and Comparators out of filter to client package

Add a new generic CompareOperator enum and use it instead of the filter
CompareOp enum everywhere BUT inside CompareFilter.
Apekshit Sharma 2017-08-29 10:27:21 -07:00 committed by Michael Stack
parent db9994c669
commit 56cba5e450
24 changed files with 891 additions and 595 deletions
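At the API surface the change is mechanical: the check-and-mutate family of methods gains overloads keyed on the new enum, and the filter-package overloads are deprecated. A minimal before/after sketch against the Table interface in this diff (row, family, qualifier, value and put are hypothetical variables on an existing Table named table):

    // Before this commit: the operator type lives in the filter package.
    import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
    table.checkAndPut(row, family, qualifier, CompareOp.EQUAL, value, put);

    // After this commit: the generic enum lives in org.apache.hadoop.hbase
    // and the CompareOp overloads are @Deprecated.
    import org.apache.hadoop.hbase.CompareOperator;
    table.checkAndPut(row, family, qualifier, CompareOperator.EQUAL, value, put);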

View File: AsyncTableBase.java

@@ -21,6 +21,7 @@ import static java.util.stream.Collectors.toList;
 import static org.apache.hadoop.hbase.client.ConnectionUtils.allOf;
 import static org.apache.hadoop.hbase.client.ConnectionUtils.toCheckExistenceOnly;
+import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
 import java.util.List;
@@ -30,7 +31,6 @@ import java.util.concurrent.TimeUnit;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.util.Bytes;
 /**
@@ -204,7 +204,7 @@ public interface AsyncTableBase {
  */
 default CompletableFuture<Boolean> checkAndPut(byte[] row, byte[] family, byte[] qualifier,
     byte[] value, Put put) {
-  return checkAndPut(row, family, qualifier, CompareOp.EQUAL, value, put);
+  return checkAndPut(row, family, qualifier, CompareOperator.EQUAL, value, put);
 }
 /**
@@ -221,7 +221,7 @@ public interface AsyncTableBase {
  * a {@link CompletableFuture}.
  */
 CompletableFuture<Boolean> checkAndPut(byte[] row, byte[] family, byte[] qualifier,
-    CompareOp compareOp, byte[] value, Put put);
+    CompareOperator compareOp, byte[] value, Put put);
 /**
  * Atomically checks if a row/family/qualifier value equals to the expected value. If it does, it
@@ -237,7 +237,7 @@ public interface AsyncTableBase {
  */
 default CompletableFuture<Boolean> checkAndDelete(byte[] row, byte[] family, byte[] qualifier,
     byte[] value, Delete delete) {
-  return checkAndDelete(row, family, qualifier, CompareOp.EQUAL, value, delete);
+  return checkAndDelete(row, family, qualifier, CompareOperator.EQUAL, value, delete);
 }
 /**
@@ -254,7 +254,7 @@ public interface AsyncTableBase {
  * by a {@link CompletableFuture}.
  */
 CompletableFuture<Boolean> checkAndDelete(byte[] row, byte[] family, byte[] qualifier,
-    CompareOp compareOp, byte[] value, Delete delete);
+    CompareOperator compareOp, byte[] value, Delete delete);
 /**
  * Performs multiple mutations atomically on a single row. Currently {@link Put} and
@@ -278,7 +278,7 @@ public interface AsyncTableBase {
  */
 default CompletableFuture<Boolean> checkAndMutate(byte[] row, byte[] family, byte[] qualifier,
     byte[] value, RowMutations mutation) {
-  return checkAndMutate(row, family, qualifier, CompareOp.EQUAL, value, mutation);
+  return checkAndMutate(row, family, qualifier, CompareOperator.EQUAL, value, mutation);
 }
 /**
@@ -295,7 +295,7 @@ public interface AsyncTableBase {
  * a {@link CompletableFuture}.
  */
 CompletableFuture<Boolean> checkAndMutate(byte[] row, byte[] family, byte[] qualifier,
-    CompareOp compareOp, byte[] value, RowMutations mutation);
+    CompareOperator compareOp, byte[] value, RowMutations mutation);
 /**
  * Return all the results that match the given scan object.
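The async side mirrors this: the EQUAL-only convenience defaults above now delegate to the CompareOperator overloads. A usage sketch, assuming an existing AsyncTable named asyncTable and hypothetical row/family/qualifier/value/put variables:

    import java.util.concurrent.CompletableFuture;
    import org.apache.hadoop.hbase.CompareOperator;

    CompletableFuture<Boolean> future =
        asyncTable.checkAndPut(row, family, qualifier, CompareOperator.GREATER, value, put);
    future.thenAccept(applied -> System.out.println("put applied: " + applied));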

View File: AsyncTableImpl.java

@@ -25,9 +25,9 @@ import java.util.concurrent.TimeUnit;
 import static java.util.stream.Collectors.*;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.util.ReflectionUtils;
 /**
@@ -122,14 +122,14 @@ class AsyncTableImpl implements AsyncTable {
 @Override
 public CompletableFuture<Boolean> checkAndPut(byte[] row, byte[] family, byte[] qualifier,
-    CompareOp compareOp, byte[] value, Put put) {
-  return wrap(rawTable.checkAndPut(row, family, qualifier, compareOp, value, put));
+    CompareOperator op, byte[] value, Put put) {
+  return wrap(rawTable.checkAndPut(row, family, qualifier, op, value, put));
 }
 @Override
 public CompletableFuture<Boolean> checkAndDelete(byte[] row, byte[] family, byte[] qualifier,
-    CompareOp compareOp, byte[] value, Delete delete) {
-  return wrap(rawTable.checkAndDelete(row, family, qualifier, compareOp, value, delete));
+    CompareOperator op, byte[] value, Delete delete) {
+  return wrap(rawTable.checkAndDelete(row, family, qualifier, op, value, delete));
 }
 @Override
@@ -139,8 +139,8 @@ class AsyncTableImpl implements AsyncTable {
 @Override
 public CompletableFuture<Boolean> checkAndMutate(byte[] row, byte[] family, byte[] qualifier,
-    CompareOp compareOp, byte[] value, RowMutations mutation) {
-  return wrap(rawTable.checkAndMutate(row, family, qualifier, compareOp, value, mutation));
+    CompareOperator op, byte[] value, RowMutations mutation) {
+  return wrap(rawTable.checkAndMutate(row, family, qualifier, op, value, mutation));
 }
 @Override

View File: HTable.java

@@ -18,16 +18,47 @@
  */
 package org.apache.hadoop.hbase.client;
-import static org.apache.hadoop.hbase.client.ConnectionUtils.checkHasFamilies;
-import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 // DO NOT MAKE USE OF THESE IMPORTS! THEY ARE HERE FOR COPROCESSOR ENDPOINTS ONLY.
 // Internally, we use shaded protobuf. This below are part of our public API.
+//SEE ABOVE NOTE!
 import com.google.protobuf.Descriptors;
 import com.google.protobuf.Message;
 import com.google.protobuf.Service;
 import com.google.protobuf.ServiceException;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CompareOperator;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HRegionLocation;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.KeyValueUtil;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.hadoop.hbase.client.coprocessor.Batch;
+import org.apache.hadoop.hbase.client.coprocessor.Batch.Callback;
+import org.apache.hadoop.hbase.filter.BinaryComparator;
+import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
+import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
+import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
+import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
+import org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.Pair;
+import org.apache.hadoop.hbase.util.ReflectionUtils;
+import org.apache.hadoop.hbase.util.Threads;
 import java.io.IOException;
 import java.io.InterruptedIOException;
 import java.util.ArrayList;
@@ -43,37 +74,7 @@ import java.util.concurrent.SynchronousQueue;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HRegionLocation;
-import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.hbase.KeyValueUtil;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
-import org.apache.hadoop.hbase.client.coprocessor.Batch;
-import org.apache.hadoop.hbase.client.coprocessor.Batch.Callback;
-import org.apache.hadoop.hbase.filter.BinaryComparator;
-import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
-import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
-import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
-//SEE ABOVE NOTE!
-import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
-import org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.Pair;
-import org.apache.hadoop.hbase.util.ReflectionUtils;
-import org.apache.hadoop.hbase.util.Threads;
+import static org.apache.hadoop.hbase.client.ConnectionUtils.checkHasFamilies;
 /**
  * An implementation of {@link Table}. Used to communicate with a single HBase table.
@@ -746,6 +747,27 @@ public class HTable implements Table {
   return checkAndPut(row, family, qualifier, CompareOp.EQUAL, value, put);
 }
+
+private boolean doCheckAndPut(final byte [] row, final byte [] family,
+    final byte [] qualifier, final String opName,
+    final byte [] value, final Put put)
+throws IOException {
+  ClientServiceCallable<Boolean> callable =
+      new ClientServiceCallable<Boolean>(this.connection, getName(), row,
+          this.rpcControllerFactory.newController(), put.getPriority()) {
+    @Override
+    protected Boolean rpcCall() throws Exception {
+      CompareType compareType = CompareType.valueOf(opName);
+      MutateRequest request = RequestConverter.buildMutateRequest(
+          getLocation().getRegionInfo().getRegionName(), row, family, qualifier,
+          new BinaryComparator(value), compareType, put);
+      MutateResponse response = doMutate(request);
+      return Boolean.valueOf(response.getProcessed());
+    }
+  };
+  return rpcCallerFactory.<Boolean> newCaller(this.writeRpcTimeout).
+      callWithRetries(callable, this.operationTimeout);
+}
 /**
  * {@inheritDoc}
  */
@@ -754,21 +776,20 @@
   final byte [] qualifier, final CompareOp compareOp, final byte [] value,
   final Put put)
 throws IOException {
-  ClientServiceCallable<Boolean> callable =
-      new ClientServiceCallable<Boolean>(this.connection, getName(), row,
-          this.rpcControllerFactory.newController(), put.getPriority()) {
-    @Override
-    protected Boolean rpcCall() throws Exception {
-      CompareType compareType = CompareType.valueOf(compareOp.name());
-      MutateRequest request = RequestConverter.buildMutateRequest(
-          getLocation().getRegionInfo().getRegionName(), row, family, qualifier,
-          new BinaryComparator(value), compareType, put);
-      MutateResponse response = doMutate(request);
-      return Boolean.valueOf(response.getProcessed());
-    }
-  };
-  return rpcCallerFactory.<Boolean> newCaller(this.writeRpcTimeout).
-      callWithRetries(callable, this.operationTimeout);
+  return doCheckAndPut(row, family, qualifier, compareOp.name(), value, put);
+}
+
+/**
+ * {@inheritDoc}
+ */
+@Override
+public boolean checkAndPut(final byte [] row, final byte [] family,
+    final byte [] qualifier, final CompareOperator op,
+    final byte [] value, final Put put)
+throws IOException {
+  // The name of the operators in CompareOperator are intentionally those of the
+  // operators in the filter's CompareOp enum.
+  return doCheckAndPut(row, family, qualifier, op.name(), value, put);
 }
 /**
@@ -780,24 +801,20 @@
   return checkAndDelete(row, family, qualifier, CompareOp.EQUAL, value, delete);
 }
-/**
- * {@inheritDoc}
- */
-@Override
-public boolean checkAndDelete(final byte [] row, final byte [] family,
-    final byte [] qualifier, final CompareOp compareOp, final byte [] value,
-    final Delete delete)
+private boolean doCheckAndDelete(final byte [] row, final byte [] family,
+    final byte [] qualifier, final String opName,
+    final byte [] value, final Delete delete)
 throws IOException {
   CancellableRegionServerCallable<SingleResponse> callable =
       new CancellableRegionServerCallable<SingleResponse>(
           this.connection, getName(), row, this.rpcControllerFactory.newController(),
           writeRpcTimeout, new RetryingTimeTracker().start(), delete.getPriority()) {
     @Override
     protected SingleResponse rpcCall() throws Exception {
-      CompareType compareType = CompareType.valueOf(compareOp.name());
+      CompareType compareType = CompareType.valueOf(opName);
       MutateRequest request = RequestConverter.buildMutateRequest(
           getLocation().getRegionInfo().getRegionName(), row, family, qualifier,
           new BinaryComparator(value), compareType, delete);
       MutateResponse response = doMutate(request);
       return ResponseConverter.getResult(request, response, getRpcControllerCellScanner());
     }
@@ -805,16 +822,16 @@
   List<Delete> rows = Collections.singletonList(delete);
   Object[] results = new Object[1];
   AsyncProcessTask task = AsyncProcessTask.newBuilder()
       .setPool(pool)
       .setTableName(tableName)
       .setRowAccess(rows)
       .setCallable(callable)
       // TODO any better timeout?
       .setRpcTimeout(Math.max(readRpcTimeout, writeRpcTimeout))
       .setOperationTimeout(operationTimeout)
       .setSubmittedRows(AsyncProcessTask.SubmittedRows.ALL)
       .setResults(results)
       .build();
   AsyncRequestFuture ars = multiAp.submit(task);
   ars.waitUntilDone();
   if (ars.hasError()) {
@@ -827,31 +844,49 @@
  * {@inheritDoc}
  */
 @Override
-public boolean checkAndMutate(final byte [] row, final byte [] family, final byte [] qualifier,
-    final CompareOp compareOp, final byte [] value, final RowMutations rm)
+public boolean checkAndDelete(final byte [] row, final byte [] family,
+    final byte [] qualifier, final CompareOp compareOp, final byte [] value,
+    final Delete delete)
 throws IOException {
+  return doCheckAndDelete(row, family, qualifier, compareOp.name(), value, delete);
+}
+
+/**
+ * {@inheritDoc}
+ */
+@Override
+public boolean checkAndDelete(final byte [] row, final byte [] family,
+    final byte [] qualifier, final CompareOperator op,
+    final byte [] value, final Delete delete)
+throws IOException {
+  return doCheckAndDelete(row, family, qualifier, op.name(), value, delete);
+}
+
+private boolean doCheckAndMutate(final byte [] row, final byte [] family, final byte [] qualifier,
+    final String opName, final byte [] value, final RowMutations rm)
+throws IOException {
   CancellableRegionServerCallable<MultiResponse> callable =
       new CancellableRegionServerCallable<MultiResponse>(connection, getName(), rm.getRow(),
          rpcControllerFactory.newController(), writeRpcTimeout, new RetryingTimeTracker().start(), rm.getMaxPriority()) {
     @Override
     protected MultiResponse rpcCall() throws Exception {
-      CompareType compareType = CompareType.valueOf(compareOp.name());
+      CompareType compareType = CompareType.valueOf(opName);
       MultiRequest request = RequestConverter.buildMutateRequest(
           getLocation().getRegionInfo().getRegionName(), row, family, qualifier,
           new BinaryComparator(value), compareType, rm);
       ClientProtos.MultiResponse response = doMulti(request);
       ClientProtos.RegionActionResult res = response.getRegionActionResultList().get(0);
       if (res.hasException()) {
         Throwable ex = ProtobufUtil.toException(res.getException());
         if (ex instanceof IOException) {
           throw (IOException)ex;
         }
         throw new IOException("Failed to checkAndMutate row: "+
             Bytes.toStringBinary(rm.getRow()), ex);
       }
       return ResponseConverter.getResults(request, response, getRpcControllerCellScanner());
     }
   };
 /**
  * Currently, we use one array to store 'processed' flag which is returned by server.
@@ -859,16 +894,16 @@
  * */
 Object[] results = new Object[rm.getMutations().size()];
 AsyncProcessTask task = AsyncProcessTask.newBuilder()
     .setPool(pool)
     .setTableName(tableName)
     .setRowAccess(rm.getMutations())
     .setResults(results)
     .setCallable(callable)
     // TODO any better timeout?
     .setRpcTimeout(Math.max(readRpcTimeout, writeRpcTimeout))
     .setOperationTimeout(operationTimeout)
     .setSubmittedRows(AsyncProcessTask.SubmittedRows.ALL)
     .build();
 AsyncRequestFuture ars = multiAp.submit(task);
 ars.waitUntilDone();
 if (ars.hasError()) {
@@ -878,6 +913,26 @@
   return ((Result)results[0]).getExists();
 }
+
+/**
+ * {@inheritDoc}
+ */
+@Override
+public boolean checkAndMutate(final byte [] row, final byte [] family, final byte [] qualifier,
+    final CompareOp compareOp, final byte [] value, final RowMutations rm)
+throws IOException {
+  return doCheckAndMutate(row, family, qualifier, compareOp.name(), value, rm);
+}
+
+/**
+ * {@inheritDoc}
+ */
+@Override
+public boolean checkAndMutate(final byte [] row, final byte [] family, final byte [] qualifier,
+    final CompareOperator op, final byte [] value, final RowMutations rm)
+throws IOException {
+  return doCheckAndMutate(row, family, qualifier, op.name(), value, rm);
+}
 /**
  * {@inheritDoc}
  */

View File: Query.java

@@ -35,6 +35,9 @@ import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
 import org.apache.hadoop.hbase.util.Bytes;
+/**
+ * Base class for HBase read operations; e.g. Scan and Get.
+ */
 @InterfaceAudience.Public
 public abstract class Query extends OperationWithAttributes {
   private static final String ISOLATION_LEVEL = "_isolationlevel_";

View File: RawAsyncTableImpl.java

@@ -37,13 +37,13 @@ import java.util.concurrent.atomic.AtomicInteger;
 import java.util.function.Function;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.AsyncRpcRetryingCallerFactory.SingleRequestCallerBuilder;
 import org.apache.hadoop.hbase.filter.BinaryComparator;
-import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.ipc.HBaseRpcController;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcCallback;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
@@ -253,24 +253,24 @@ class RawAsyncTableImpl implements RawAsyncTable {
 @Override
 public CompletableFuture<Boolean> checkAndPut(byte[] row, byte[] family, byte[] qualifier,
-    CompareOp compareOp, byte[] value, Put put) {
+    CompareOperator op, byte[] value, Put put) {
   return this.<Boolean> newCaller(row, rpcTimeoutNs)
       .action((controller, loc, stub) -> RawAsyncTableImpl.<Put, Boolean> mutate(controller, loc,
         stub, put,
         (rn, p) -> RequestConverter.buildMutateRequest(rn, row, family, qualifier,
-          new BinaryComparator(value), CompareType.valueOf(compareOp.name()), p),
+          new BinaryComparator(value), CompareType.valueOf(op.name()), p),
         (c, r) -> r.getProcessed()))
       .call();
 }
 @Override
 public CompletableFuture<Boolean> checkAndDelete(byte[] row, byte[] family, byte[] qualifier,
-    CompareOp compareOp, byte[] value, Delete delete) {
+    CompareOperator op, byte[] value, Delete delete) {
   return this.<Boolean> newCaller(row, rpcTimeoutNs)
       .action((controller, loc, stub) -> RawAsyncTableImpl.<Delete, Boolean> mutate(controller,
         loc, stub, delete,
         (rn, d) -> RequestConverter.buildMutateRequest(rn, row, family, qualifier,
-          new BinaryComparator(value), CompareType.valueOf(compareOp.name()), d),
+          new BinaryComparator(value), CompareType.valueOf(op.name()), d),
         (c, r) -> r.getProcessed()))
       .call();
 }
@@ -330,12 +330,12 @@ class RawAsyncTableImpl implements RawAsyncTable {
 @Override
 public CompletableFuture<Boolean> checkAndMutate(byte[] row, byte[] family, byte[] qualifier,
-    CompareOp compareOp, byte[] value, RowMutations mutation) {
+    CompareOperator op, byte[] value, RowMutations mutation) {
   return this.<Boolean> newCaller(mutation, rpcTimeoutNs)
       .action((controller, loc, stub) -> RawAsyncTableImpl.<Boolean> mutateRow(controller, loc,
         stub, mutation,
         (rn, rm) -> RequestConverter.buildMutateRequest(rn, row, family, qualifier,
-          new BinaryComparator(value), CompareType.valueOf(compareOp.name()), rm),
+          new BinaryComparator(value), CompareType.valueOf(op.name()), rm),
         resp -> resp.getExists()))
       .call();
 }

View File: Table.java

@@ -24,6 +24,7 @@ import java.util.List;
 import java.util.Map;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -250,10 +251,35 @@ public interface Table extends Closeable {
  * @param put data to put if check succeeds
  * @throws IOException e
  * @return true if the new put was executed, false otherwise
+ * @deprecated Since 2.0.0. Will be removed in 3.0.0. Use
+ * {@link #checkAndPut(byte[], byte[], byte[], CompareOperator, byte[], Put)}
  */
+@Deprecated
 boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier,
     CompareFilter.CompareOp compareOp, byte[] value, Put put) throws IOException;
+
+/**
+ * Atomically checks if a row/family/qualifier value matches the expected
+ * value. If it does, it adds the put. If the passed value is null, the check
+ * is for the lack of column (ie: non-existence)
+ *
+ * The expected value argument of this call is on the left and the current
+ * value of the cell is on the right side of the comparison operator.
+ *
+ * E.g. the GREATER operator means: expected value > existing <=> add the put.
+ *
+ * @param row to check
+ * @param family column family to check
+ * @param qualifier column qualifier to check
+ * @param op comparison operator to use
+ * @param value the expected value
+ * @param put data to put if check succeeds
+ * @throws IOException e
+ * @return true if the new put was executed, false otherwise
+ */
+boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier,
+    CompareOperator op, byte[] value, Put put) throws IOException;
 /**
  * Deletes the specified cells/row.
  *
@@ -310,10 +336,35 @@
  * @param delete data to delete if check succeeds
  * @throws IOException e
  * @return true if the new delete was executed, false otherwise
+ * @deprecated Since 2.0.0. Will be removed in 3.0.0. Use
+ * {@link #checkAndDelete(byte[], byte[], byte[], CompareOperator, byte[], Delete)}
  */
+@Deprecated
 boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier,
     CompareFilter.CompareOp compareOp, byte[] value, Delete delete) throws IOException;
+
+/**
+ * Atomically checks if a row/family/qualifier value matches the expected
+ * value. If it does, it adds the delete. If the passed value is null, the
+ * check is for the lack of column (ie: non-existence)
+ *
+ * The expected value argument of this call is on the left and the current
+ * value of the cell is on the right side of the comparison operator.
+ *
+ * E.g. the GREATER operator means: expected value > existing <=> add the delete.
+ *
+ * @param row to check
+ * @param family column family to check
+ * @param qualifier column qualifier to check
+ * @param op comparison operator to use
+ * @param value the expected value
+ * @param delete data to delete if check succeeds
+ * @throws IOException e
+ * @return true if the new delete was executed, false otherwise
+ */
+boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier,
+    CompareOperator op, byte[] value, Delete delete) throws IOException;
 /**
  * Performs multiple mutations atomically on a single row. Currently
  * {@link Put} and {@link Delete} are supported.
@@ -556,10 +607,35 @@
  * @param mutation mutations to perform if check succeeds
  * @throws IOException e
  * @return true if the new put was executed, false otherwise
+ * @deprecated Since 2.0.0. Will be removed in 3.0.0. Use
+ * {@link #checkAndMutate(byte[], byte[], byte[], CompareOperator, byte[], RowMutations)}
  */
+@Deprecated
 boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier,
     CompareFilter.CompareOp compareOp, byte[] value, RowMutations mutation) throws IOException;
+
+/**
+ * Atomically checks if a row/family/qualifier value matches the expected value.
+ * If it does, it performs the row mutations. If the passed value is null, the check
+ * is for the lack of column (ie: non-existence)
+ *
+ * The expected value argument of this call is on the left and the current
+ * value of the cell is on the right side of the comparison operator.
+ *
+ * E.g. the GREATER operator means: expected value > existing <=> perform row mutations.
+ *
+ * @param row to check
+ * @param family column family to check
+ * @param qualifier column qualifier to check
+ * @param op the comparison operator
+ * @param value the expected value
+ * @param mutation mutations to perform if check succeeds
+ * @throws IOException e
+ * @return true if the mutations were executed, false otherwise
+ */
+boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier, CompareOperator op,
+    byte[] value, RowMutations mutation) throws IOException;
 /**
  * Set timeout (millisecond) of each operation in this Table instance, will override the value
  * of hbase.client.operation.timeout in configuration.
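Read the comparisons the way these javadocs describe them: the expected value is the left operand and the stored cell value the right. A hedged sketch of the new checkAndMutate overload, assuming an existing Table named table and hypothetical row/family/qualifier/expected/mutations variables:

    import org.apache.hadoop.hbase.CompareOperator;

    // Perform the mutations only if expected > current cell value.
    boolean applied = table.checkAndMutate(row, family, qualifier,
        CompareOperator.GREATER, expected, mutations);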

View File: CompareFilter.java

@@ -24,6 +24,7 @@ import java.util.ArrayList;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
+import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
@@ -49,8 +50,12 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
  */
 @InterfaceAudience.Public
 public abstract class CompareFilter extends FilterBase {
-/** Comparison operators. */
+/**
+ * Comparison operators. For filters only!
+ * Use {@link CompareOperator} otherwise.
+ * It (intentionally) uses the same operator names as {@link CompareOperator}.
+ * TODO: Replace with the generic {@link CompareOperator}.
+ */
 @InterfaceAudience.Public
 public enum CompareOp {
   /** less than */
@@ -215,4 +220,4 @@
   this.compareOp.name(),
   Bytes.toStringBinary(this.comparator.getValue()));
 }
}

View File: CompareOperator.java (new file)

@@ -0,0 +1,44 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
/**
* Generic set of comparison operators.
* @since 2.0.0
*/
@InterfaceAudience.Public
public enum CompareOperator {
// Keeps the same names as the enums in the filter's CompareOp intentionally.
// The conversion of an operator to its protobuf representation is via a name comparison.
/** less than */
LESS,
/** less than or equal to */
LESS_OR_EQUAL,
/** equals */
EQUAL,
/** not equal */
NOT_EQUAL,
/** greater than or equal to */
GREATER_OR_EQUAL,
/** greater than */
GREATER,
/** no operation */
NO_OP,
}
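As the comment in the enum notes, conversion to the protobuf CompareType (and interoperability with the old filter enum) rides on the shared constant names; elsewhere in this commit that appears as CompareType.valueOf(op.name()). A small sketch of the name-based round trip:

    import org.apache.hadoop.hbase.CompareOperator;
    import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;

    CompareOperator op = CompareOperator.GREATER_OR_EQUAL;
    // Safe precisely because both enums keep identical constant names.
    CompareOp filterOp = CompareOp.valueOf(op.name());
    CompareOperator back = CompareOperator.valueOf(filterOp.name());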

View File: TableBasedReplicationQueuesImpl.java

@@ -25,6 +25,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Abortable;
+import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -364,7 +365,7 @@ public class TableBasedReplicationQueuesImpl extends ReplicationTableBase
 private void safeQueueUpdate(RowMutations mutate) throws ReplicationException, IOException {
   try (Table replicationTable = getOrBlockOnReplicationTable()) {
     boolean updateSuccess = replicationTable.checkAndMutate(mutate.getRow(),
-        CF_QUEUE, COL_QUEUE_OWNER, CompareFilter.CompareOp.EQUAL, serverNameBytes, mutate);
+        CF_QUEUE, COL_QUEUE_OWNER, CompareOperator.EQUAL, serverNameBytes, mutate);
     if (!updateSuccess) {
       throw new ReplicationException("Failed to update Replication Table because we lost queue " +
           " ownership");
@@ -409,7 +410,7 @@
   // new owner and update the queue's history
   try (Table replicationTable = getOrBlockOnReplicationTable()) {
     boolean success = replicationTable.checkAndMutate(queue.getRow(),
-        CF_QUEUE, COL_QUEUE_OWNER, CompareFilter.CompareOp.EQUAL, Bytes.toBytes(originalServer),
+        CF_QUEUE, COL_QUEUE_OWNER, CompareOperator.EQUAL, Bytes.toBytes(originalServer),
         claimAndRenameQueue);
     return success;
   }

View File: RemoteHTable.java

@@ -19,23 +19,16 @@
 package org.apache.hadoop.hbase.rest.client;
-import java.io.IOException;
-import java.io.InterruptedIOException;
-import java.io.UnsupportedEncodingException;
-import java.net.URLEncoder;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeMap;
+import com.google.protobuf.Descriptors;
+import com.google.protobuf.Message;
+import com.google.protobuf.Service;
+import com.google.protobuf.ServiceException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
@@ -70,10 +63,17 @@ import org.apache.hadoop.hbase.rest.model.TableSchemaModel;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.util.StringUtils;
-import com.google.protobuf.Descriptors;
-import com.google.protobuf.Message;
-import com.google.protobuf.Service;
-import com.google.protobuf.ServiceException;
+import java.io.IOException;
+import java.io.InterruptedIOException;
+import java.io.UnsupportedEncodingException;
+import java.net.URLEncoder;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
 /**
  * HTable interface to remote tables accessed via REST gateway
@@ -720,6 +720,12 @@ public class RemoteHTable implements Table {
   throw new IOException("checkAndPut for non-equal comparison not implemented");
 }
+
+@Override
+public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier,
+    CompareOperator compareOp, byte[] value, Put put) throws IOException {
+  throw new IOException("checkAndPut for non-equal comparison not implemented");
+}
 @Override
 public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier,
     byte[] value, Delete delete) throws IOException {
@@ -764,6 +770,12 @@
   throw new IOException("checkAndDelete for non-equal comparison not implemented");
 }
+
+@Override
+public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier,
+    CompareOperator compareOp, byte[] value, Delete delete) throws IOException {
+  throw new IOException("checkAndDelete for non-equal comparison not implemented");
+}
 @Override
 public Result increment(Increment increment) throws IOException {
   throw new IOException("Increment not supported");
@@ -841,6 +853,11 @@
   throw new UnsupportedOperationException("checkAndMutate not implemented");
 }
+
+@Override
+public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier,
+    CompareOperator compareOp, byte[] value, RowMutations rm) throws IOException {
+  throw new UnsupportedOperationException("checkAndMutate not implemented");
+}
 @Override
 public void setOperationTimeout(int operationTimeout) {
   throw new UnsupportedOperationException();

View File: HTableWrapper.java

@@ -30,6 +30,7 @@ import java.util.Map;
 import java.util.concurrent.ExecutorService;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
@@ -170,6 +171,11 @@ public final class HTableWrapper implements Table {
   return table.checkAndPut(row, family, qualifier, compareOp, value, put);
 }
+
+public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier,
+    CompareOperator op, byte[] value, Put put) throws IOException {
+  return table.checkAndPut(row, family, qualifier, op, value, put);
+}
 public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier,
     byte[] value, Delete delete) throws IOException {
   return table.checkAndDelete(row, family, qualifier, value, delete);
@@ -180,6 +186,11 @@
   return table.checkAndDelete(row, family, qualifier, compareOp, value, delete);
 }
+
+public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier,
+    CompareOperator op, byte[] value, Delete delete) throws IOException {
+  return table.checkAndDelete(row, family, qualifier, op, value, delete);
+}
 public long incrementColumnValue(byte[] row, byte[] family,
     byte[] qualifier, long amount) throws IOException {
   return table.incrementColumnValue(row, family, qualifier, amount);
@@ -292,6 +303,13 @@
   return table.checkAndMutate(row, family, qualifier, compareOp, value, rm);
 }
+
+@Override
+public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier,
+    CompareOperator op, byte[] value, RowMutations rm)
+    throws IOException {
+  return table.checkAndMutate(row, family, qualifier, op, value, rm);
+}
 @Override
 public void setOperationTimeout(int operationTimeout) {
   table.setOperationTimeout(operationTimeout);

View File: RegionObserver.java

@ -19,17 +19,10 @@
package org.apache.hadoop.hbase.coprocessor; package org.apache.hadoop.hbase.coprocessor;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.NavigableSet;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HRegionInfo;
@ -45,7 +38,6 @@ import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.ByteArrayComparable; import org.apache.hadoop.hbase.filter.ByteArrayComparable;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper; import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
import org.apache.hadoop.hbase.io.Reference; import org.apache.hadoop.hbase.io.Reference;
import org.apache.hadoop.hbase.io.hfile.CacheConfig; import org.apache.hadoop.hbase.io.hfile.CacheConfig;
@ -63,9 +55,16 @@ import org.apache.hadoop.hbase.regionserver.StoreFileReader;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest; import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
import org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker; import org.apache.hadoop.hbase.regionserver.querymatcher.DeleteTracker;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit; import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList;
import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.wal.WALKey; import org.apache.hadoop.hbase.wal.WALKey;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.NavigableSet;
/** /**
* Coprocessors implement this interface to observe and mediate client actions * Coprocessors implement this interface to observe and mediate client actions
* on the region. * on the region.
@ -789,7 +788,7 @@ public interface RegionObserver extends Coprocessor {
* @param row row to check * @param row row to check
* @param family column family * @param family column family
* @param qualifier column qualifier * @param qualifier column qualifier
* @param compareOp the comparison operation * @param op the comparison operation
* @param comparator the comparator * @param comparator the comparator
* @param put data to put if check succeeds * @param put data to put if check succeeds
* @param result * @param result
@ -797,9 +796,9 @@ public interface RegionObserver extends Coprocessor {
* processing * processing
*/ */
default boolean preCheckAndPut(final ObserverContext<RegionCoprocessorEnvironment> c, default boolean preCheckAndPut(final ObserverContext<RegionCoprocessorEnvironment> c,
final byte [] row, final byte [] family, final byte [] qualifier, final byte [] row, final byte [] family, final byte [] qualifier,
final CompareOp compareOp, final ByteArrayComparable comparator, final CompareOperator op, final ByteArrayComparable comparator,
final Put put, final boolean result) final Put put, final boolean result)
throws IOException { throws IOException {
return result; return result;
} }
@ -822,7 +821,7 @@ public interface RegionObserver extends Coprocessor {
* @param row row to check * @param row row to check
* @param family column family * @param family column family
* @param qualifier column qualifier * @param qualifier column qualifier
* @param compareOp the comparison operation * @param op the comparison operation
* @param comparator the comparator * @param comparator the comparator
* @param put data to put if check succeeds * @param put data to put if check succeeds
* @param result * @param result
@ -830,7 +829,7 @@ public interface RegionObserver extends Coprocessor {
* processing * processing
*/ */
default boolean preCheckAndPutAfterRowLock(final ObserverContext<RegionCoprocessorEnvironment> c, default boolean preCheckAndPutAfterRowLock(final ObserverContext<RegionCoprocessorEnvironment> c,
final byte[] row, final byte[] family, final byte[] qualifier, final CompareOp compareOp, final byte[] row, final byte[] family, final byte[] qualifier, final CompareOperator op,
final ByteArrayComparable comparator, final Put put, final ByteArrayComparable comparator, final Put put,
final boolean result) throws IOException { final boolean result) throws IOException {
return result; return result;
@ -848,16 +847,16 @@ public interface RegionObserver extends Coprocessor {
* @param row row to check * @param row row to check
* @param family column family * @param family column family
* @param qualifier column qualifier * @param qualifier column qualifier
* @param compareOp the comparison operation * @param op the comparison operation
* @param comparator the comparator * @param comparator the comparator
* @param put data to put if check succeeds * @param put data to put if check succeeds
* @param result from the checkAndPut * @param result from the checkAndPut
* @return the possibly transformed return value to return to client * @return the possibly transformed return value to return to client
*/ */
default boolean postCheckAndPut(final ObserverContext<RegionCoprocessorEnvironment> c, default boolean postCheckAndPut(final ObserverContext<RegionCoprocessorEnvironment> c,
final byte [] row, final byte [] family, final byte [] qualifier, final byte [] row, final byte [] family, final byte [] qualifier,
final CompareOp compareOp, final ByteArrayComparable comparator, final CompareOperator op, final ByteArrayComparable comparator,
final Put put, final boolean result) final Put put, final boolean result)
throws IOException { throws IOException {
return result; return result;
} }
@ -876,16 +875,16 @@ public interface RegionObserver extends Coprocessor {
* @param row row to check * @param row row to check
* @param family column family * @param family column family
* @param qualifier column qualifier * @param qualifier column qualifier
* @param compareOp the comparison operation * @param op the comparison operation
* @param comparator the comparator * @param comparator the comparator
* @param delete delete to commit if check succeeds * @param delete delete to commit if check succeeds
* @param result * @param result
* @return the value to return to client if bypassing default processing * @return the value to return to client if bypassing default processing
*/ */
default boolean preCheckAndDelete(final ObserverContext<RegionCoprocessorEnvironment> c, default boolean preCheckAndDelete(final ObserverContext<RegionCoprocessorEnvironment> c,
final byte [] row, final byte [] family, final byte [] qualifier, final byte [] row, final byte [] family, final byte [] qualifier,
final CompareOp compareOp, final ByteArrayComparable comparator, final CompareOperator op, final ByteArrayComparable comparator,
final Delete delete, final boolean result) final Delete delete, final boolean result)
throws IOException { throws IOException {
return result; return result;
} }
@ -908,7 +907,7 @@ public interface RegionObserver extends Coprocessor {
* @param row row to check * @param row row to check
* @param family column family * @param family column family
* @param qualifier column qualifier * @param qualifier column qualifier
* @param compareOp the comparison operation * @param op the comparison operation
* @param comparator the comparator * @param comparator the comparator
* @param delete delete to commit if check succeeds * @param delete delete to commit if check succeeds
* @param result * @param result
@ -916,7 +915,8 @@ public interface RegionObserver extends Coprocessor {
*/ */
default boolean preCheckAndDeleteAfterRowLock( default boolean preCheckAndDeleteAfterRowLock(
final ObserverContext<RegionCoprocessorEnvironment> c, final ObserverContext<RegionCoprocessorEnvironment> c,
final byte[] row, final byte[] family, final byte[] qualifier, final CompareOp compareOp, final byte[] row, final byte[] family, final byte[] qualifier,
final CompareOperator op,
final ByteArrayComparable comparator, final Delete delete, final ByteArrayComparable comparator, final Delete delete,
final boolean result) throws IOException { final boolean result) throws IOException {
return result; return result;
@ -934,16 +934,16 @@ public interface RegionObserver extends Coprocessor {
* @param row row to check * @param row row to check
* @param family column family * @param family column family
* @param qualifier column qualifier * @param qualifier column qualifier
* @param compareOp the comparison operation * @param op the comparison operation
* @param comparator the comparator * @param comparator the comparator
* @param delete delete to commit if check succeeds * @param delete delete to commit if check succeeds
* @param result from the CheckAndDelete * @param result from the CheckAndDelete
* @return the possibly transformed returned value to return to client * @return the possibly transformed returned value to return to client
*/ */
default boolean postCheckAndDelete(final ObserverContext<RegionCoprocessorEnvironment> c, default boolean postCheckAndDelete(final ObserverContext<RegionCoprocessorEnvironment> c,
final byte [] row, final byte [] family, final byte [] qualifier, final byte [] row, final byte [] family, final byte [] qualifier,
final CompareOp compareOp, final ByteArrayComparable comparator, final CompareOperator op, final ByteArrayComparable comparator,
final Delete delete, final boolean result) final Delete delete, final boolean result)
throws IOException { throws IOException {
return result; return result;
} }
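For downstream coprocessors the migration is mechanical: only the parameter type changes, the hook contract does not. Below is a minimal sketch of an observer written against the new signature; the class name and the EQUAL-only policy are invented for illustration, and the no-op lifecycle methods are only there because Coprocessor requires them at this point in the codebase.

    import java.io.IOException;
    import org.apache.hadoop.hbase.CompareOperator;
    import org.apache.hadoop.hbase.CoprocessorEnvironment;
    import org.apache.hadoop.hbase.client.Delete;
    import org.apache.hadoop.hbase.coprocessor.ObserverContext;
    import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
    import org.apache.hadoop.hbase.coprocessor.RegionObserver;
    import org.apache.hadoop.hbase.filter.ByteArrayComparable;

    // Hypothetical observer that only lets EQUAL-guarded checkAndDelete through.
    public class EqualOnlyDeleteObserver implements RegionObserver {
      @Override
      public boolean preCheckAndDelete(ObserverContext<RegionCoprocessorEnvironment> c,
          byte[] row, byte[] family, byte[] qualifier,
          CompareOperator op, ByteArrayComparable comparator,
          Delete delete, boolean result) throws IOException {
        if (op != CompareOperator.EQUAL) {
          c.bypass();    // skip the default checkAndDelete processing
          return false;  // this becomes the value returned to the client
        }
        return result;   // otherwise defer to default processing
      }

      @Override
      public void start(CoprocessorEnvironment env) throws IOException { }

      @Override
      public void stop(CoprocessorEnvironment env) throws IOException { }
    }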

View File

@ -17,15 +17,131 @@
*/ */
package org.apache.hadoop.hbase.regionserver; package org.apache.hadoop.hbase.regionserver;
import static org.apache.hadoop.hbase.HConstants.REPLICATION_SCOPE_LOCAL; import org.apache.commons.logging.Log;
import static org.apache.hadoop.hbase.util.CollectionUtils.computeIfAbsent; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.CompoundConfiguration;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.DroppedSnapshotException;
import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
import org.apache.hadoop.hbase.HDFSBlocksDistribution;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.NotServingRegionException;
import org.apache.hadoop.hbase.RegionTooBusyException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.UnknownScannerException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.IsolationLevel;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionReplicaUtil;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.conf.ConfigurationManager;
import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;
import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;
import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
import org.apache.hadoop.hbase.exceptions.RegionInRecoveryException;
import org.apache.hadoop.hbase.exceptions.TimeoutIOException;
import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;
import org.apache.hadoop.hbase.filter.ByteArrayComparable;
import org.apache.hadoop.hbase.filter.FilterWrapper;
import org.apache.hadoop.hbase.filter.IncompatibleFilterException;
import org.apache.hadoop.hbase.io.HFileLink;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
import org.apache.hadoop.hbase.ipc.RpcCall;
import org.apache.hadoop.hbase.ipc.RpcCallContext;
import org.apache.hadoop.hbase.ipc.RpcServer;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.monitoring.TaskMonitor;
import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;
import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;
import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
import org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;
import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;
import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.regionserver.wal.WALUtil;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Optional; import org.apache.hadoop.hbase.shaded.com.google.common.base.Optional;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions; import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
import org.apache.hadoop.hbase.shaded.com.google.common.io.Closeables; import org.apache.hadoop.hbase.shaded.com.google.common.io.Closeables;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.TextFormat;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.EventType;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor;
import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
import org.apache.hadoop.hbase.snapshot.SnapshotManifest;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CancelableProgressable;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.hbase.util.CollectionUtils;
import org.apache.hadoop.hbase.util.CompressionTest;
import org.apache.hadoop.hbase.util.EncryptionTest;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.HashedBytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.wal.WALFactory;
import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.hadoop.hbase.wal.WALSplitter;
import org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;
import org.apache.hadoop.io.MultipleIOException;
import org.apache.hadoop.util.StringUtils;
import org.apache.htrace.Trace;
import org.apache.htrace.TraceScope;
import java.io.EOFException; import java.io.EOFException;
import java.io.FileNotFoundException; import java.io.FileNotFoundException;
@ -76,125 +192,8 @@ import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.function.Function; import java.util.function.Function;
import org.apache.commons.logging.Log; import static org.apache.hadoop.hbase.HConstants.REPLICATION_SCOPE_LOCAL;
import org.apache.commons.logging.LogFactory; import static org.apache.hadoop.hbase.util.CollectionUtils.computeIfAbsent;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CompoundConfiguration;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.DroppedSnapshotException;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
import org.apache.hadoop.hbase.HDFSBlocksDistribution;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.NotServingRegionException;
import org.apache.hadoop.hbase.RegionTooBusyException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.UnknownScannerException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.IsolationLevel;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.PackagePrivateFieldAccessor;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionReplicaUtil;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.conf.ConfigurationManager;
import org.apache.hadoop.hbase.conf.PropagatingConfigurationObserver;
import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionSnare;
import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
import org.apache.hadoop.hbase.exceptions.RegionInRecoveryException;
import org.apache.hadoop.hbase.exceptions.TimeoutIOException;
import org.apache.hadoop.hbase.exceptions.UnknownProtocolException;
import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
import org.apache.hadoop.hbase.filter.ByteArrayComparable;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.FilterWrapper;
import org.apache.hadoop.hbase.filter.IncompatibleFilterException;
import org.apache.hadoop.hbase.io.HFileLink;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.ipc.CallerDisconnectedException;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
import org.apache.hadoop.hbase.ipc.RpcCall;
import org.apache.hadoop.hbase.ipc.RpcCallContext;
import org.apache.hadoop.hbase.ipc.RpcServer;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
import org.apache.hadoop.hbase.monitoring.TaskMonitor;
import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl.WriteEntry;
import org.apache.hadoop.hbase.regionserver.ScannerContext.LimitScope;
import org.apache.hadoop.hbase.regionserver.ScannerContext.NextState;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
import org.apache.hadoop.hbase.regionserver.throttle.CompactionThroughputControllerFactory;
import org.apache.hadoop.hbase.regionserver.throttle.NoLimitThroughputController;
import org.apache.hadoop.hbase.regionserver.throttle.ThroughputController;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.regionserver.wal.WALUtil;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.TextFormat;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionLoad;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId;
import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotDescription;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.FlushAction;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor.StoreFlushDescriptor;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor.EventType;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.StoreDescriptor;
import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
import org.apache.hadoop.hbase.snapshot.SnapshotManifest;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CancelableProgressable;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.hbase.util.CollectionUtils;
import org.apache.hadoop.hbase.util.CompressionTest;
import org.apache.hadoop.hbase.util.EncryptionTest;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.HashedBytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.wal.WALFactory;
import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.hadoop.hbase.wal.WALSplitter;
import org.apache.hadoop.hbase.wal.WALSplitter.MutationReplay;
import org.apache.hadoop.io.MultipleIOException;
import org.apache.hadoop.util.StringUtils;
import org.apache.htrace.Trace;
import org.apache.htrace.TraceScope;
@SuppressWarnings("deprecation") @SuppressWarnings("deprecation")
@InterfaceAudience.Private @InterfaceAudience.Private
@ -3685,20 +3684,19 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
@Override @Override
public boolean checkAndMutate(byte [] row, byte [] family, byte [] qualifier, public boolean checkAndMutate(byte [] row, byte [] family, byte [] qualifier,
CompareOp compareOp, ByteArrayComparable comparator, Mutation mutation, CompareOperator op, ByteArrayComparable comparator, Mutation mutation, boolean writeToWAL)
boolean writeToWAL)
throws IOException{ throws IOException{
checkMutationType(mutation, row); checkMutationType(mutation, row);
return doCheckAndRowMutate(row, family, qualifier, compareOp, comparator, null, return doCheckAndRowMutate(row, family, qualifier, op, comparator, null,
mutation, writeToWAL); mutation, writeToWAL);
} }
@Override @Override
public boolean checkAndRowMutate(byte [] row, byte [] family, byte [] qualifier, public boolean checkAndRowMutate(byte [] row, byte [] family, byte [] qualifier,
CompareOp compareOp, ByteArrayComparable comparator, RowMutations rm, CompareOperator op, ByteArrayComparable comparator, RowMutations rm,
boolean writeToWAL) boolean writeToWAL)
throws IOException { throws IOException {
return doCheckAndRowMutate(row, family, qualifier, compareOp, comparator, rm, null, return doCheckAndRowMutate(row, family, qualifier, op, comparator, rm, null,
writeToWAL); writeToWAL);
} }
@ -3707,8 +3705,8 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
* switches in the few places where there is deviation. * switches in the few places where there is deviation.
*/ */
private boolean doCheckAndRowMutate(byte [] row, byte [] family, byte [] qualifier, private boolean doCheckAndRowMutate(byte [] row, byte [] family, byte [] qualifier,
CompareOp compareOp, ByteArrayComparable comparator, RowMutations rowMutations, CompareOperator op, ByteArrayComparable comparator, RowMutations rowMutations,
Mutation mutation, boolean writeToWAL) Mutation mutation, boolean writeToWAL)
throws IOException { throws IOException {
// Could do the below checks but seems wacky with two callers only. Just comment out for now. // Could do the below checks but seems wacky with two callers only. Just comment out for now.
// One caller passes a Mutation, the other passes RowMutation. Presume all good so we don't // One caller passes a Mutation, the other passes RowMutation. Presume all good so we don't
@ -3732,10 +3730,10 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
Boolean processed = null; Boolean processed = null;
if (mutation instanceof Put) { if (mutation instanceof Put) {
processed = this.getCoprocessorHost().preCheckAndPutAfterRowLock(row, family, processed = this.getCoprocessorHost().preCheckAndPutAfterRowLock(row, family,
qualifier, compareOp, comparator, (Put)mutation); qualifier, op, comparator, (Put)mutation);
} else if (mutation instanceof Delete) { } else if (mutation instanceof Delete) {
processed = this.getCoprocessorHost().preCheckAndDeleteAfterRowLock(row, family, processed = this.getCoprocessorHost().preCheckAndDeleteAfterRowLock(row, family,
qualifier, compareOp, comparator, (Delete)mutation); qualifier, op, comparator, (Delete)mutation);
} }
if (processed != null) { if (processed != null) {
return processed; return processed;
@ -3757,7 +3755,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
Cell kv = result.get(0); Cell kv = result.get(0);
cellTs = kv.getTimestamp(); cellTs = kv.getTimestamp();
int compareResult = CellComparator.compareValue(kv, comparator); int compareResult = CellComparator.compareValue(kv, comparator);
matches = matches(compareOp, compareResult); matches = matches(op, compareResult);
} }
// If matches put the new put or delete the new delete // If matches put the new put or delete the new delete
if (matches) { if (matches) {
@ -3813,9 +3811,9 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
} }
} }
private boolean matches(final CompareOp compareOp, final int compareResult) { private boolean matches(final CompareOperator op, final int compareResult) {
boolean matches = false; boolean matches = false;
switch (compareOp) { switch (op) {
case LESS: case LESS:
matches = compareResult < 0; matches = compareResult < 0;
break; break;
@ -3835,7 +3833,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
matches = compareResult > 0; matches = compareResult > 0;
break; break;
default: default:
throw new RuntimeException("Unknown Compare op " + compareOp.name()); throw new RuntimeException("Unknown Compare op " + op.name());
} }
return matches; return matches;
} }
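The diff elides the middle cases of that switch; they follow the same pattern as LESS and GREATER. Spelled out in full as a standalone sketch (not the HRegion source itself), the operator-to-boolean mapping is:

    import org.apache.hadoop.hbase.CompareOperator;

    public class CompareOperatorMatch {
      // compareResult is the comparator's verdict: negative, zero, or positive.
      static boolean matches(CompareOperator op, int compareResult) {
        switch (op) {
          case LESS:             return compareResult < 0;
          case LESS_OR_EQUAL:    return compareResult <= 0;
          case EQUAL:            return compareResult == 0;
          case NOT_EQUAL:        return compareResult != 0;
          case GREATER_OR_EQUAL: return compareResult >= 0;
          case GREATER:          return compareResult > 0;
          default:
            throw new RuntimeException("Unknown Compare op " + op.name());
        }
      }

      public static void main(String[] args) {
        System.out.println(matches(CompareOperator.GREATER_OR_EQUAL, 0)); // true
        System.out.println(matches(CompareOperator.NOT_EQUAL, 0));        // false
      }
    }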

View File

@ -18,31 +18,6 @@
*/ */
package org.apache.hadoop.hbase.regionserver; package org.apache.hadoop.hbase.regionserver;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.net.BindException;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NavigableMap;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.LongAdder;
import org.apache.commons.lang3.mutable.MutableObject; import org.apache.commons.lang3.mutable.MutableObject;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
@ -53,6 +28,7 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScannable; import org.apache.hadoop.hbase.CellScannable;
import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.DroppedSnapshotException; import org.apache.hadoop.hbase.DroppedSnapshotException;
import org.apache.hadoop.hbase.HBaseIOException; import org.apache.hadoop.hbase.HBaseIOException;
@ -83,7 +59,6 @@ import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
import org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException; import org.apache.hadoop.hbase.exceptions.OutOfOrderScannerNextException;
import org.apache.hadoop.hbase.exceptions.ScannerResetException; import org.apache.hadoop.hbase.exceptions.ScannerResetException;
import org.apache.hadoop.hbase.filter.ByteArrayComparable; import org.apache.hadoop.hbase.filter.ByteArrayComparable;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler; import org.apache.hadoop.hbase.ipc.HBaseRPCErrorHandler;
import org.apache.hadoop.hbase.ipc.HBaseRpcController; import org.apache.hadoop.hbase.ipc.HBaseRpcController;
import org.apache.hadoop.hbase.ipc.PriorityFunction; import org.apache.hadoop.hbase.ipc.PriorityFunction;
@ -115,18 +90,6 @@ import org.apache.hadoop.hbase.regionserver.handler.OpenRegionHandler;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit; import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.security.Superusers; import org.apache.hadoop.hbase.security.Superusers;
import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.DNS;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
import org.apache.hadoop.hbase.util.Strings;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.hadoop.hbase.wal.WALSplitter;
import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;
import org.apache.zookeeper.KeeperException;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.cache.Cache; import org.apache.hadoop.hbase.shaded.com.google.common.cache.Cache;
import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheBuilder; import org.apache.hadoop.hbase.shaded.com.google.common.cache.CacheBuilder;
@ -220,6 +183,42 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.BulkLoadDescr
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor; import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor; import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FlushDescriptor;
import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor; import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.RegionEventDescriptor;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.DNS;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.ServerRegionReplicaUtil;
import org.apache.hadoop.hbase.util.Strings;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.wal.WALKey;
import org.apache.hadoop.hbase.wal.WALSplitter;
import org.apache.hadoop.hbase.zookeeper.ZKSplitLog;
import org.apache.zookeeper.KeeperException;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.net.BindException;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NavigableMap;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.LongAdder;
/** /**
* Implements the regionserver RPC services. * Implements the regionserver RPC services.
@ -601,9 +600,9 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
* @param comparator @throws IOException * @param comparator the comparator to use for the check
* @throws IOException
*/ */
private boolean checkAndRowMutate(final Region region, final List<ClientProtos.Action> actions, private boolean checkAndRowMutate(final Region region, final List<ClientProtos.Action> actions,
final CellScanner cellScanner, byte[] row, byte[] family, byte[] qualifier, final CellScanner cellScanner, byte[] row, byte[] family, byte[] qualifier,
CompareOp compareOp, ByteArrayComparable comparator, RegionActionResult.Builder builder, CompareOperator op, ByteArrayComparable comparator, RegionActionResult.Builder builder,
ActivePolicyEnforcement spaceQuotaEnforcement) throws IOException { ActivePolicyEnforcement spaceQuotaEnforcement) throws IOException {
if (!region.getRegionInfo().isMetaTable()) { if (!region.getRegionInfo().isMetaTable()) {
regionServer.cacheFlusher.reclaimMemStoreMemory(); regionServer.cacheFlusher.reclaimMemStoreMemory();
} }
@ -642,7 +641,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
builder.addResultOrException( builder.addResultOrException(
resultOrExceptionOrBuilder.build()); resultOrExceptionOrBuilder.build());
} }
return region.checkAndRowMutate(row, family, qualifier, compareOp, return region.checkAndRowMutate(row, family, qualifier, op,
comparator, rm, Boolean.TRUE); comparator, rm, Boolean.TRUE);
} }
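The actions list arriving here is reassembled into a single RowMutations before the one guarded call into the region, so every action in the bundle succeeds or fails together. A sketch of such a bundle as a client would build it (row, family, and values are made up for the example):

    import java.io.IOException;
    import org.apache.hadoop.hbase.client.Delete;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.client.RowMutations;
    import org.apache.hadoop.hbase.util.Bytes;

    public class RowMutationsSketch {
      // A Put and a Delete on the same row, applied atomically if the
      // server-side comparison succeeds.
      static RowMutations build() throws IOException {
        byte[] row = Bytes.toBytes("r1");
        RowMutations rm = new RowMutations(row);
        Put put = new Put(row);
        put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("a"), Bytes.toBytes("1"));
        rm.add(put);
        Delete del = new Delete(row);
        del.addColumns(Bytes.toBytes("cf"), Bytes.toBytes("b"));
        rm.add(del);
        return rm;
      }
    }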
@ -2597,11 +2596,12 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
byte[] row = condition.getRow().toByteArray(); byte[] row = condition.getRow().toByteArray();
byte[] family = condition.getFamily().toByteArray(); byte[] family = condition.getFamily().toByteArray();
byte[] qualifier = condition.getQualifier().toByteArray(); byte[] qualifier = condition.getQualifier().toByteArray();
CompareOp compareOp = CompareOp.valueOf(condition.getCompareType().name()); CompareOperator op =
CompareOperator.valueOf(condition.getCompareType().name());
ByteArrayComparable comparator = ByteArrayComparable comparator =
ProtobufUtil.toComparator(condition.getComparator()); ProtobufUtil.toComparator(condition.getComparator());
processed = checkAndRowMutate(region, regionAction.getActionList(), processed = checkAndRowMutate(region, regionAction.getActionList(),
cellScanner, row, family, qualifier, compareOp, cellScanner, row, family, qualifier, op,
comparator, regionActionResultBuilder, spaceQuotaEnforcement); comparator, regionActionResultBuilder, spaceQuotaEnforcement);
} else { } else {
mutateRows(region, regionAction.getActionList(), cellScanner, mutateRows(region, regionAction.getActionList(), cellScanner,
@ -2737,7 +2737,8 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
byte[] row = condition.getRow().toByteArray(); byte[] row = condition.getRow().toByteArray();
byte[] family = condition.getFamily().toByteArray(); byte[] family = condition.getFamily().toByteArray();
byte[] qualifier = condition.getQualifier().toByteArray(); byte[] qualifier = condition.getQualifier().toByteArray();
CompareOp compareOp = CompareOp.valueOf(condition.getCompareType().name()); CompareOperator compareOp =
CompareOperator.valueOf(condition.getCompareType().name());
ByteArrayComparable comparator = ByteArrayComparable comparator =
ProtobufUtil.toComparator(condition.getComparator()); ProtobufUtil.toComparator(condition.getComparator());
if (region.getCoprocessorHost() != null) { if (region.getCoprocessorHost() != null) {
@ -2768,19 +2769,19 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
byte[] row = condition.getRow().toByteArray(); byte[] row = condition.getRow().toByteArray();
byte[] family = condition.getFamily().toByteArray(); byte[] family = condition.getFamily().toByteArray();
byte[] qualifier = condition.getQualifier().toByteArray(); byte[] qualifier = condition.getQualifier().toByteArray();
CompareOp compareOp = CompareOp.valueOf(condition.getCompareType().name()); CompareOperator op = CompareOperator.valueOf(condition.getCompareType().name());
ByteArrayComparable comparator = ByteArrayComparable comparator =
ProtobufUtil.toComparator(condition.getComparator()); ProtobufUtil.toComparator(condition.getComparator());
if (region.getCoprocessorHost() != null) { if (region.getCoprocessorHost() != null) {
processed = region.getCoprocessorHost().preCheckAndDelete( processed = region.getCoprocessorHost().preCheckAndDelete(
row, family, qualifier, compareOp, comparator, delete); row, family, qualifier, op, comparator, delete);
} }
if (processed == null) { if (processed == null) {
boolean result = region.checkAndMutate(row, family, boolean result = region.checkAndMutate(row, family,
qualifier, compareOp, comparator, delete, true); qualifier, op, comparator, delete, true);
if (region.getCoprocessorHost() != null) { if (region.getCoprocessorHost() != null) {
result = region.getCoprocessorHost().postCheckAndDelete(row, family, result = region.getCoprocessorHost().postCheckAndDelete(row, family,
qualifier, compareOp, comparator, delete, result); qualifier, op, comparator, delete, result);
} }
processed = result; processed = result;
} }

View File

@ -23,12 +23,8 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.HDFSBlocksDistribution; import org.apache.hadoop.hbase.HDFSBlocksDistribution;
import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability; import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.client.Append; import org.apache.hadoop.hbase.client.Append;
@ -45,7 +41,6 @@ import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.conf.ConfigurationObserver; import org.apache.hadoop.hbase.conf.ConfigurationObserver;
import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException; import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException;
import org.apache.hadoop.hbase.filter.ByteArrayComparable; import org.apache.hadoop.hbase.filter.ByteArrayComparable;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceCall;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service; import org.apache.hadoop.hbase.shaded.com.google.protobuf.Service;
@ -370,14 +365,14 @@ public interface Region extends ConfigurationObserver {
* @param row to check * @param row to check
* @param family column family to check * @param family column family to check
* @param qualifier column qualifier to check * @param qualifier column qualifier to check
* @param compareOp the comparison operator * @param op the comparison operator
* @param comparator * @param comparator the comparator to use for the check
* @param mutation * @param mutation the mutation to apply if the check succeeds
* @param writeToWAL * @param writeToWAL whether the mutation should be written to the WAL
* @return true if mutation was applied, false otherwise * @return true if mutation was applied, false otherwise
* @throws IOException * @throws IOException
*/ */
boolean checkAndMutate(byte [] row, byte [] family, byte [] qualifier, CompareOp compareOp, boolean checkAndMutate(byte [] row, byte [] family, byte [] qualifier, CompareOperator op,
ByteArrayComparable comparator, Mutation mutation, boolean writeToWAL) throws IOException; ByteArrayComparable comparator, Mutation mutation, boolean writeToWAL) throws IOException;
/** /**
@ -388,14 +383,14 @@ public interface Region extends ConfigurationObserver {
* @param row to check * @param row to check
* @param family column family to check * @param family column family to check
* @param qualifier column qualifier to check * @param qualifier column qualifier to check
* @param compareOp the comparison operator * @param op the comparison operator
* @param comparator * @param comparator the comparator to use for the check
* @param mutations * @param mutations the row mutations to apply if the check succeeds
* @param writeToWAL * @param writeToWAL whether the mutations should be written to the WAL
* @return true if mutations were applied, false otherwise * @return true if mutations were applied, false otherwise
* @throws IOException * @throws IOException
*/ */
boolean checkAndRowMutate(byte [] row, byte [] family, byte [] qualifier, CompareOp compareOp, boolean checkAndRowMutate(byte [] row, byte [] family, byte [] qualifier, CompareOperator op,
ByteArrayComparable comparator, RowMutations mutations, boolean writeToWAL) ByteArrayComparable comparator, RowMutations mutations, boolean writeToWAL)
throws IOException; throws IOException;
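As a usage sketch against the updated interface (the region handle, row, and values here are assumptions for illustration, not code from this patch), a guarded put looks like:

    import java.io.IOException;
    import org.apache.hadoop.hbase.CompareOperator;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.filter.BinaryComparator;
    import org.apache.hadoop.hbase.regionserver.Region;
    import org.apache.hadoop.hbase.util.Bytes;

    public class CheckAndMutateSketch {
      // Applies the Put only if cf:q currently equals "expected".
      static boolean putIfEquals(Region region) throws IOException {
        byte[] row = Bytes.toBytes("r1");
        byte[] family = Bytes.toBytes("cf");
        byte[] qualifier = Bytes.toBytes("q");
        Put put = new Put(row);
        put.addColumn(family, qualifier, Bytes.toBytes("newValue"));
        return region.checkAndMutate(row, family, qualifier,
            CompareOperator.EQUAL, new BinaryComparator(Bytes.toBytes("expected")),
            put, true /* writeToWAL */);
      }
    }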

View File

@ -37,6 +37,7 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.Coprocessor; import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseConfiguration;
@ -64,7 +65,6 @@ import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionObserver; import org.apache.hadoop.hbase.coprocessor.RegionObserver;
import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType; import org.apache.hadoop.hbase.coprocessor.RegionObserver.MutationType;
import org.apache.hadoop.hbase.filter.ByteArrayComparable; import org.apache.hadoop.hbase.filter.ByteArrayComparable;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper; import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
import org.apache.hadoop.hbase.io.Reference; import org.apache.hadoop.hbase.io.Reference;
import org.apache.hadoop.hbase.io.hfile.CacheConfig; import org.apache.hadoop.hbase.io.hfile.CacheConfig;
@ -1007,7 +1007,7 @@ public class RegionCoprocessorHost
* @param row row to check * @param row row to check
* @param family column family * @param family column family
* @param qualifier column qualifier * @param qualifier column qualifier
* @param compareOp the comparison operation * @param op the comparison operation
* @param comparator the comparator * @param comparator the comparator
* @param put data to put if check succeeds * @param put data to put if check succeeds
* @return true or false to return to client if default processing should * @return true or false to return to client if default processing should
@ -1015,7 +1015,7 @@ public class RegionCoprocessorHost
* @throws IOException e * @throws IOException e
*/ */
public Boolean preCheckAndPut(final byte [] row, final byte [] family, public Boolean preCheckAndPut(final byte [] row, final byte [] family,
final byte [] qualifier, final CompareOp compareOp, final byte [] qualifier, final CompareOperator op,
final ByteArrayComparable comparator, final Put put) final ByteArrayComparable comparator, final Put put)
throws IOException { throws IOException {
return execOperationWithResult(true, false, return execOperationWithResult(true, false,
@ -1024,7 +1024,7 @@ public class RegionCoprocessorHost
public void call(RegionObserver oserver, ObserverContext<RegionCoprocessorEnvironment> ctx) public void call(RegionObserver oserver, ObserverContext<RegionCoprocessorEnvironment> ctx)
throws IOException { throws IOException {
setResult(oserver.preCheckAndPut(ctx, row, family, qualifier, setResult(oserver.preCheckAndPut(ctx, row, family, qualifier,
compareOp, comparator, put, getResult())); op, comparator, put, getResult()));
} }
}); });
} }
@ -1033,7 +1033,7 @@ public class RegionCoprocessorHost
* @param row row to check * @param row row to check
* @param family column family * @param family column family
* @param qualifier column qualifier * @param qualifier column qualifier
* @param compareOp the comparison operation * @param op the comparison operation
* @param comparator the comparator * @param comparator the comparator
* @param put data to put if check succeeds * @param put data to put if check succeeds
* @return true or false to return to client if default processing should * @return true or false to return to client if default processing should
@ -1041,15 +1041,15 @@ public class RegionCoprocessorHost
* @throws IOException e * @throws IOException e
*/ */
public Boolean preCheckAndPutAfterRowLock(final byte[] row, final byte[] family, public Boolean preCheckAndPutAfterRowLock(final byte[] row, final byte[] family,
final byte[] qualifier, final CompareOp compareOp, final ByteArrayComparable comparator, final byte[] qualifier, final CompareOperator op, final ByteArrayComparable comparator,
final Put put) throws IOException { final Put put) throws IOException {
return execOperationWithResult(true, false, return execOperationWithResult(true, false,
coprocessors.isEmpty() ? null : new RegionOperationWithResult<Boolean>() { coprocessors.isEmpty() ? null : new RegionOperationWithResult<Boolean>() {
@Override @Override
public void call(RegionObserver oserver, ObserverContext<RegionCoprocessorEnvironment> ctx) public void call(RegionObserver oserver, ObserverContext<RegionCoprocessorEnvironment> ctx)
throws IOException { throws IOException {
setResult(oserver.preCheckAndPutAfterRowLock(ctx, row, family, qualifier, setResult(oserver.preCheckAndPutAfterRowLock(ctx, row, family, qualifier,
compareOp, comparator, put, getResult())); op, comparator, put, getResult()));
} }
}); });
} }
@ -1058,13 +1058,13 @@ public class RegionCoprocessorHost
* @param row row to check * @param row row to check
* @param family column family * @param family column family
* @param qualifier column qualifier * @param qualifier column qualifier
* @param compareOp the comparison operation * @param op the comparison operation
* @param comparator the comparator * @param comparator the comparator
* @param put data to put if check succeeds * @param put data to put if check succeeds
* @throws IOException e * @throws IOException e
*/ */
public boolean postCheckAndPut(final byte [] row, final byte [] family, public boolean postCheckAndPut(final byte [] row, final byte [] family,
final byte [] qualifier, final CompareOp compareOp, final byte [] qualifier, final CompareOperator op,
final ByteArrayComparable comparator, final Put put, final ByteArrayComparable comparator, final Put put,
boolean result) throws IOException { boolean result) throws IOException {
return execOperationWithResult(result, return execOperationWithResult(result,
@ -1073,7 +1073,7 @@ public class RegionCoprocessorHost
public void call(RegionObserver oserver, ObserverContext<RegionCoprocessorEnvironment> ctx) public void call(RegionObserver oserver, ObserverContext<RegionCoprocessorEnvironment> ctx)
throws IOException { throws IOException {
setResult(oserver.postCheckAndPut(ctx, row, family, qualifier, setResult(oserver.postCheckAndPut(ctx, row, family, qualifier,
compareOp, comparator, put, getResult())); op, comparator, put, getResult()));
} }
}); });
} }
@ -1082,7 +1082,7 @@ public class RegionCoprocessorHost
* @param row row to check * @param row row to check
* @param family column family * @param family column family
* @param qualifier column qualifier * @param qualifier column qualifier
* @param compareOp the comparison operation * @param op the comparison operation
* @param comparator the comparator * @param comparator the comparator
* @param delete delete to commit if check succeeds * @param delete delete to commit if check succeeds
* @return true or false to return to client if default processing should * @return true or false to return to client if default processing should
@ -1090,7 +1090,7 @@ public class RegionCoprocessorHost
* @throws IOException e * @throws IOException e
*/ */
public Boolean preCheckAndDelete(final byte [] row, final byte [] family, public Boolean preCheckAndDelete(final byte [] row, final byte [] family,
final byte [] qualifier, final CompareOp compareOp, final byte [] qualifier, final CompareOperator op,
final ByteArrayComparable comparator, final Delete delete) final ByteArrayComparable comparator, final Delete delete)
throws IOException { throws IOException {
return execOperationWithResult(true, false, return execOperationWithResult(true, false,
@ -1099,7 +1099,7 @@ public class RegionCoprocessorHost
public void call(RegionObserver oserver, ObserverContext<RegionCoprocessorEnvironment> ctx) public void call(RegionObserver oserver, ObserverContext<RegionCoprocessorEnvironment> ctx)
throws IOException { throws IOException {
setResult(oserver.preCheckAndDelete(ctx, row, family, setResult(oserver.preCheckAndDelete(ctx, row, family,
qualifier, compareOp, comparator, delete, getResult())); qualifier, op, comparator, delete, getResult()));
} }
}); });
} }
@ -1108,7 +1108,7 @@ public class RegionCoprocessorHost
* @param row row to check * @param row row to check
* @param family column family * @param family column family
* @param qualifier column qualifier * @param qualifier column qualifier
* @param compareOp the comparison operation * @param op the comparison operation
* @param comparator the comparator * @param comparator the comparator
* @param delete delete to commit if check succeeds * @param delete delete to commit if check succeeds
* @return true or false to return to client if default processing should * @return true or false to return to client if default processing should
@ -1116,15 +1116,15 @@ public class RegionCoprocessorHost
* @throws IOException e * @throws IOException e
*/ */
public Boolean preCheckAndDeleteAfterRowLock(final byte[] row, final byte[] family, public Boolean preCheckAndDeleteAfterRowLock(final byte[] row, final byte[] family,
final byte[] qualifier, final CompareOp compareOp, final ByteArrayComparable comparator, final byte[] qualifier, final CompareOperator op, final ByteArrayComparable comparator,
final Delete delete) throws IOException { final Delete delete) throws IOException {
return execOperationWithResult(true, false, return execOperationWithResult(true, false,
coprocessors.isEmpty() ? null : new RegionOperationWithResult<Boolean>() { coprocessors.isEmpty() ? null : new RegionOperationWithResult<Boolean>() {
@Override @Override
public void call(RegionObserver oserver, ObserverContext<RegionCoprocessorEnvironment> ctx) public void call(RegionObserver oserver, ObserverContext<RegionCoprocessorEnvironment> ctx)
throws IOException { throws IOException {
setResult(oserver.preCheckAndDeleteAfterRowLock(ctx, row, setResult(oserver.preCheckAndDeleteAfterRowLock(ctx, row,
family, qualifier, compareOp, comparator, delete, getResult())); family, qualifier, op, comparator, delete, getResult()));
} }
}); });
} }
@ -1133,13 +1133,13 @@ public class RegionCoprocessorHost
* @param row row to check * @param row row to check
* @param family column family * @param family column family
* @param qualifier column qualifier * @param qualifier column qualifier
* @param compareOp the comparison operation * @param op the comparison operation
* @param comparator the comparator * @param comparator the comparator
* @param delete delete to commit if check succeeds * @param delete delete to commit if check succeeds
* @throws IOException e * @throws IOException e
*/ */
public boolean postCheckAndDelete(final byte [] row, final byte [] family, public boolean postCheckAndDelete(final byte [] row, final byte [] family,
final byte [] qualifier, final CompareOp compareOp, final byte [] qualifier, final CompareOperator op,
final ByteArrayComparable comparator, final Delete delete, final ByteArrayComparable comparator, final Delete delete,
boolean result) throws IOException { boolean result) throws IOException {
return execOperationWithResult(result, return execOperationWithResult(result,
@ -1148,7 +1148,7 @@ public class RegionCoprocessorHost
public void call(RegionObserver oserver, ObserverContext<RegionCoprocessorEnvironment> ctx) public void call(RegionObserver oserver, ObserverContext<RegionCoprocessorEnvironment> ctx)
throws IOException { throws IOException {
setResult(oserver.postCheckAndDelete(ctx, row, family, setResult(oserver.postCheckAndDelete(ctx, row, family,
qualifier, compareOp, comparator, delete, getResult())); qualifier, op, comparator, delete, getResult()));
} }
}); });
} }
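Condensed, the pre/post pair above brackets the region call the same way on every path: a non-null Boolean from the pre hook short-circuits the operation, otherwise the post hook may transform the result. A simplified view of the dispatch, restating the RSRpcServices hunk shown earlier rather than quoting the actual server code:

    import java.io.IOException;
    import org.apache.hadoop.hbase.CompareOperator;
    import org.apache.hadoop.hbase.client.Delete;
    import org.apache.hadoop.hbase.filter.ByteArrayComparable;
    import org.apache.hadoop.hbase.regionserver.Region;
    import org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost;

    public class CheckAndDeleteDispatchSketch {
      // Simplified bracket of checkAndDelete with the host hooks above.
      static boolean dispatch(Region region, RegionCoprocessorHost host,
          byte[] row, byte[] family, byte[] qualifier,
          CompareOperator op, ByteArrayComparable comparator,
          Delete delete) throws IOException {
        Boolean processed =
            host.preCheckAndDelete(row, family, qualifier, op, comparator, delete);
        if (processed != null) {
          return processed; // a coprocessor already decided the outcome
        }
        boolean result =
            region.checkAndMutate(row, family, qualifier, op, comparator, delete, true);
        return host.postCheckAndDelete(row, family, qualifier, op, comparator, delete, result);
      }
    }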

View File

@ -18,20 +18,10 @@
*/ */
package org.apache.hadoop.hbase.security.access; package org.apache.hadoop.hbase.security.access;
import java.io.IOException; import com.google.protobuf.Message;
import java.net.InetAddress; import com.google.protobuf.RpcCallback;
import java.security.PrivilegedExceptionAction; import com.google.protobuf.RpcController;
import java.util.ArrayList; import com.google.protobuf.Service;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
@ -39,6 +29,7 @@ import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellScanner; import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.CompoundConfiguration; import org.apache.hadoop.hbase.CompoundConfiguration;
import org.apache.hadoop.hbase.CoprocessorEnvironment; import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.DoNotRetryIOException;
@ -80,7 +71,6 @@ import org.apache.hadoop.hbase.coprocessor.RegionObserver;
import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment; import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionServerObserver; import org.apache.hadoop.hbase.coprocessor.RegionServerObserver;
import org.apache.hadoop.hbase.filter.ByteArrayComparable; import org.apache.hadoop.hbase.filter.ByteArrayComparable;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList; import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.io.hfile.HFile; import org.apache.hadoop.hbase.io.hfile.HFile;
@ -110,6 +100,13 @@ import org.apache.hadoop.hbase.security.Superusers;
import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.UserProvider; import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.security.access.Permission.Action; import org.apache.hadoop.hbase.security.access.Permission.Action;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableSet;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.MapMaker;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry; import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.WALEntry;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest; import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest;
@ -123,17 +120,19 @@ import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.SimpleMutableByteRange; import org.apache.hadoop.hbase.util.SimpleMutableByteRange;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap; import java.io.IOException;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableSet; import java.net.InetAddress;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.ListMultimap; import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists; import java.util.ArrayList;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.MapMaker; import java.util.Collection;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps; import java.util.HashMap;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets; import java.util.Iterator;
import com.google.protobuf.Message; import java.util.List;
import com.google.protobuf.RpcCallback; import java.util.Map;
import com.google.protobuf.RpcController; import java.util.Map.Entry;
import com.google.protobuf.Service; import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
/** /**
* Provides basic authorization checks for data access and administrative * Provides basic authorization checks for data access and administrative
@ -1763,7 +1762,7 @@ public class AccessController implements MasterObserver, RegionObserver, RegionS
@Override @Override
public boolean preCheckAndPut(final ObserverContext<RegionCoprocessorEnvironment> c, public boolean preCheckAndPut(final ObserverContext<RegionCoprocessorEnvironment> c,
final byte [] row, final byte [] family, final byte [] qualifier, final byte [] row, final byte [] family, final byte [] qualifier,
final CompareFilter.CompareOp compareOp, final CompareOperator op,
final ByteArrayComparable comparator, final Put put, final ByteArrayComparable comparator, final Put put,
final boolean result) throws IOException { final boolean result) throws IOException {
User user = getActiveUser(c); User user = getActiveUser(c);
@ -1797,9 +1796,10 @@ public class AccessController implements MasterObserver, RegionObserver, RegionS
@Override @Override
public boolean preCheckAndPutAfterRowLock(final ObserverContext<RegionCoprocessorEnvironment> c, public boolean preCheckAndPutAfterRowLock(final ObserverContext<RegionCoprocessorEnvironment> c,
final byte[] row, final byte[] family, final byte[] qualifier, final byte[] row, final byte[] family, final byte[] qualifier,
final CompareFilter.CompareOp compareOp, final ByteArrayComparable comparator, final Put put, final CompareOperator op, final ByteArrayComparable comparator, final Put put,
final boolean result) throws IOException { final boolean result)
throws IOException {
if (put.getAttribute(CHECK_COVERING_PERM) != null) { if (put.getAttribute(CHECK_COVERING_PERM) != null) {
// We had failure with table, cf and q perm checks and now giving a chance for cell // We had failure with table, cf and q perm checks and now giving a chance for cell
// perm check // perm check
@ -1826,7 +1826,7 @@ public class AccessController implements MasterObserver, RegionObserver, RegionS
@Override @Override
public boolean preCheckAndDelete(final ObserverContext<RegionCoprocessorEnvironment> c, public boolean preCheckAndDelete(final ObserverContext<RegionCoprocessorEnvironment> c,
final byte [] row, final byte [] family, final byte [] qualifier, final byte [] row, final byte [] family, final byte [] qualifier,
final CompareFilter.CompareOp compareOp, final CompareOperator op,
final ByteArrayComparable comparator, final Delete delete, final ByteArrayComparable comparator, final Delete delete,
final boolean result) throws IOException { final boolean result) throws IOException {
// An ACL on a delete is useless, we shouldn't allow it // An ACL on a delete is useless, we shouldn't allow it
@ -1856,7 +1856,7 @@ public class AccessController implements MasterObserver, RegionObserver, RegionS
@Override @Override
public boolean preCheckAndDeleteAfterRowLock( public boolean preCheckAndDeleteAfterRowLock(
final ObserverContext<RegionCoprocessorEnvironment> c, final byte[] row, final byte[] family, final ObserverContext<RegionCoprocessorEnvironment> c, final byte[] row, final byte[] family,
final byte[] qualifier, final CompareFilter.CompareOp compareOp, final byte[] qualifier, final CompareOperator op,
final ByteArrayComparable comparator, final Delete delete, final boolean result) final ByteArrayComparable comparator, final Delete delete, final boolean result)
throws IOException { throws IOException {
if (delete.getAttribute(CHECK_COVERING_PERM) != null) { if (delete.getAttribute(CHECK_COVERING_PERM) != null) {

View File

@@ -26,6 +26,7 @@ import java.util.concurrent.ConcurrentHashMap;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HConstants;
@@ -34,7 +35,7 @@ import org.apache.hadoop.hbase.io.crypto.Encryption;
 import org.apache.hadoop.hbase.io.crypto.KeyStoreKeyProvider;
 import org.apache.hadoop.hbase.security.EncryptionUtil;
-@InterfaceAudience.Public
+@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
 public class EncryptionTest {
   private static final Log LOG = LogFactory.getLog(EncryptionTest.class);
==== TestCheckAndMutate.java ====
@@ -18,6 +18,7 @@
  */
 package org.apache.hadoop.hbase.client;
+import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.filter.CompareFilter;
@@ -31,12 +32,16 @@ import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.TestName;
+import java.io.IOException;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;

 @Category(MediumTests.class)
 public class TestCheckAndMutate {
   private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+  private static final byte[] ROWKEY = Bytes.toBytes("12345");
+  private static final byte[] FAMILY = Bytes.toBytes("cf");

   @Rule
   public TestName name = new TestName();
@@ -57,61 +62,86 @@ public class TestCheckAndMutate {
     TEST_UTIL.shutdownMiniCluster();
   }

+  private Table createTable()
+  throws IOException, InterruptedException {
+    final TableName tableName = TableName.valueOf(name.getMethodName());
+    Table table = TEST_UTIL.createTable(tableName, FAMILY);
+    TEST_UTIL.waitTableAvailable(tableName.getName(), 5000);
+    return table;
+  }
+
+  private void putOneRow(Table table) throws IOException {
+    Put put = new Put(ROWKEY);
+    put.addColumn(FAMILY, Bytes.toBytes("A"), Bytes.toBytes("a"));
+    put.addColumn(FAMILY, Bytes.toBytes("B"), Bytes.toBytes("b"));
+    put.addColumn(FAMILY, Bytes.toBytes("C"), Bytes.toBytes("c"));
+    table.put(put);
+  }
+
+  private void getOneRowAndAssertAllExist(final Table table) throws IOException {
+    Get get = new Get(ROWKEY);
+    Result result = table.get(get);
+    assertTrue("Column A value should be a",
+        Bytes.toString(result.getValue(FAMILY, Bytes.toBytes("A"))).equals("a"));
+    assertTrue("Column B value should be b",
+        Bytes.toString(result.getValue(FAMILY, Bytes.toBytes("B"))).equals("b"));
+    assertTrue("Column C value should be c",
+        Bytes.toString(result.getValue(FAMILY, Bytes.toBytes("C"))).equals("c"));
+  }
+
+  private void getOneRowAndAssertAllButCExist(final Table table) throws IOException {
+    Get get = new Get(ROWKEY);
+    Result result = table.get(get);
+    assertTrue("Column A value should be a",
+        Bytes.toString(result.getValue(FAMILY, Bytes.toBytes("A"))).equals("a"));
+    assertTrue("Column B value should be b",
+        Bytes.toString(result.getValue(FAMILY, Bytes.toBytes("B"))).equals("b"));
+    assertTrue("Column C should not exist",
+        result.getValue(FAMILY, Bytes.toBytes("C")) == null);
+  }
+
+  private RowMutations makeRowMutationsWithColumnCDeleted() throws IOException {
+    RowMutations rm = new RowMutations(ROWKEY, 2);
+    Put put = new Put(ROWKEY);
+    put.addColumn(FAMILY, Bytes.toBytes("A"), Bytes.toBytes("a"));
+    put.addColumn(FAMILY, Bytes.toBytes("B"), Bytes.toBytes("b"));
+    rm.add(put);
+    Delete del = new Delete(ROWKEY);
+    del.addColumn(FAMILY, Bytes.toBytes("C"));
+    rm.add(del);
+    return rm;
+  }
+
+  private RowMutations getBogusRowMutations() throws IOException {
+    Put p = new Put(ROWKEY);
+    byte[] value = new byte[0];
+    p.addColumn(new byte[]{'b', 'o', 'g', 'u', 's'}, new byte[]{'A'}, value);
+    RowMutations rm = new RowMutations(ROWKEY);
+    rm.add(p);
+    return rm;
+  }
+
   @Test
   public void testCheckAndMutate() throws Throwable {
-    final TableName tableName = TableName.valueOf(name.getMethodName());
-    final byte[] rowKey = Bytes.toBytes("12345");
-    final byte[] family = Bytes.toBytes("cf");
-    Table table = TEST_UTIL.createTable(tableName, family);
-    TEST_UTIL.waitTableAvailable(tableName.getName(), 5000);
-    try {
+    try (Table table = createTable()) {
       // put one row
-      Put put = new Put(rowKey);
-      put.addColumn(family, Bytes.toBytes("A"), Bytes.toBytes("a"));
-      put.addColumn(family, Bytes.toBytes("B"), Bytes.toBytes("b"));
-      put.addColumn(family, Bytes.toBytes("C"), Bytes.toBytes("c"));
-      table.put(put);
+      putOneRow(table);
       // get row back and assert the values
-      Get get = new Get(rowKey);
-      Result result = table.get(get);
-      assertTrue("Column A value should be a",
-          Bytes.toString(result.getValue(family, Bytes.toBytes("A"))).equals("a"));
-      assertTrue("Column B value should be b",
-          Bytes.toString(result.getValue(family, Bytes.toBytes("B"))).equals("b"));
-      assertTrue("Column C value should be c",
-          Bytes.toString(result.getValue(family, Bytes.toBytes("C"))).equals("c"));
+      getOneRowAndAssertAllExist(table);
       // put the same row again with C column deleted
-      RowMutations rm = new RowMutations(rowKey, 2);
-      put = new Put(rowKey);
-      put.addColumn(family, Bytes.toBytes("A"), Bytes.toBytes("a"));
-      put.addColumn(family, Bytes.toBytes("B"), Bytes.toBytes("b"));
-      rm.add(put);
-      Delete del = new Delete(rowKey);
-      del.addColumn(family, Bytes.toBytes("C"));
-      rm.add(del);
-      boolean res = table.checkAndMutate(rowKey, family, Bytes.toBytes("A"), CompareFilter.CompareOp.EQUAL,
-          Bytes.toBytes("a"), rm);
+      RowMutations rm = makeRowMutationsWithColumnCDeleted();
+      boolean res = table.checkAndMutate(ROWKEY, FAMILY, Bytes.toBytes("A"),
+          CompareFilter.CompareOp.EQUAL, Bytes.toBytes("a"), rm);
       assertTrue(res);
       // get row back and assert the values
-      get = new Get(rowKey);
-      result = table.get(get);
-      assertTrue("Column A value should be a",
-          Bytes.toString(result.getValue(family, Bytes.toBytes("A"))).equals("a"));
-      assertTrue("Column B value should be b",
-          Bytes.toString(result.getValue(family, Bytes.toBytes("B"))).equals("b"));
-      assertTrue("Column C should not exist",
-          result.getValue(family, Bytes.toBytes("C")) == null);
+      getOneRowAndAssertAllButCExist(table);
       //Test that we get a region level exception
       try {
-        Put p = new Put(rowKey);
-        byte[] value = new byte[0];
-        p.addColumn(new byte[]{'b', 'o', 'g', 'u', 's'}, new byte[]{'A'}, value);
-        rm = new RowMutations(rowKey);
-        rm.add(p);
-        table.checkAndMutate(rowKey, family, Bytes.toBytes("A"), CompareFilter.CompareOp.EQUAL,
+        rm = getBogusRowMutations();
+        table.checkAndMutate(ROWKEY, FAMILY, Bytes.toBytes("A"), CompareFilter.CompareOp.EQUAL,
            Bytes.toBytes("a"), rm);
        fail("Expected NoSuchColumnFamilyException");
      } catch (RetriesExhaustedWithDetailsException e) {
@@ -121,8 +151,39 @@ public class TestCheckAndMutate {
           // expected
         }
       }
-    } finally {
-      table.close();
+    }
+  }
+
+  @Test
+  public void testCheckAndMutateUsingNewComparisonOperatorInstead() throws Throwable {
+    try (Table table = createTable()) {
+      // put one row
+      putOneRow(table);
+      // get row back and assert the values
+      getOneRowAndAssertAllExist(table);
+      // put the same row again with C column deleted
+      RowMutations rm = makeRowMutationsWithColumnCDeleted();
+      boolean res = table.checkAndMutate(ROWKEY, FAMILY, Bytes.toBytes("A"),
+          CompareOperator.EQUAL, Bytes.toBytes("a"), rm);
+      assertTrue(res);
+      // get row back and assert the values
+      getOneRowAndAssertAllButCExist(table);
+      //Test that we get a region level exception
+      try {
+        rm = getBogusRowMutations();
+        table.checkAndMutate(ROWKEY, FAMILY, Bytes.toBytes("A"), CompareOperator.EQUAL,
+            Bytes.toBytes("a"), rm);
+        fail("Expected NoSuchColumnFamilyException");
+      } catch (RetriesExhaustedWithDetailsException e) {
+        try {
+          throw e.getCause(0);
+        } catch (NoSuchColumnFamilyException e1) {
+          // expected
+        }
+      }
     }
   }
 }
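The two tests above pin down the client-facing migration: the Table overloads taking the old filter enum and the new top-level enum coexist and are exercised identically. A side-by-side sketch of the call-site change; the class and method names and the row/family/qualifier values below are illustrative only.

import java.io.IOException;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class CheckAndMutateMigration {
  // Old spelling: the enum nested inside the filter package.
  static boolean oldStyle(Table table, byte[] row, byte[] family, RowMutations rm)
      throws IOException {
    return table.checkAndMutate(row, family, Bytes.toBytes("A"),
        CompareFilter.CompareOp.EQUAL, Bytes.toBytes("a"), rm);
  }

  // New spelling: the generic top-level enum this patch introduces.
  static boolean newStyle(Table table, byte[] row, byte[] family, RowMutations rm)
      throws IOException {
    return table.checkAndMutate(row, family, Bytes.toBytes("A"),
        CompareOperator.EQUAL, Bytes.toBytes("a"), rm);
  }
}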
==== SimpleRegionObserver.java ====
@@ -19,24 +19,11 @@
 package org.apache.hadoop.hbase.coprocessor;
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList;
-import java.io.IOException;
-import java.util.List;
-import java.util.Map;
-import java.util.NavigableSet;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicInteger;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.client.Append;
@@ -49,11 +36,9 @@ import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.filter.ByteArrayComparable;
-import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
 import org.apache.hadoop.hbase.io.Reference;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
-import org.apache.hadoop.hbase.regionserver.HStoreFile;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
 import org.apache.hadoop.hbase.regionserver.KeyValueScanner;
 import org.apache.hadoop.hbase.regionserver.Leases;
@@ -66,10 +51,23 @@ import org.apache.hadoop.hbase.regionserver.Store;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
 import org.apache.hadoop.hbase.regionserver.StoreFileReader;
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.ImmutableList;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.hbase.wal.WALKey;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+import java.util.NavigableSet;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
 /**
  * A sample region observer that tests the RegionObserver interface.
  * It works with TestRegionObserverInterface to provide the test case.
@@ -542,15 +540,15 @@ public class SimpleRegionObserver implements RegionObserver {
   @Override
   public boolean preCheckAndPut(ObserverContext<RegionCoprocessorEnvironment> e, byte[] row,
-      byte[] family, byte[] qualifier, CompareOp compareOp, ByteArrayComparable comparator,
+      byte[] family, byte[] qualifier, CompareOperator compareOp, ByteArrayComparable comparator,
       Put put, boolean result) throws IOException {
     ctPreCheckAndPut.incrementAndGet();
     return true;
   }

   @Override
   public boolean preCheckAndPutAfterRowLock(ObserverContext<RegionCoprocessorEnvironment> e,
-      byte[] row, byte[] family, byte[] qualifier, CompareOp compareOp,
+      byte[] row, byte[] family, byte[] qualifier, CompareOperator compareOp,
       ByteArrayComparable comparator, Put put, boolean result) throws IOException {
     ctPreCheckAndPutAfterRowLock.incrementAndGet();
     return true;
@@ -558,23 +556,23 @@ public class SimpleRegionObserver implements RegionObserver {
   @Override
   public boolean postCheckAndPut(ObserverContext<RegionCoprocessorEnvironment> e, byte[] row,
-      byte[] family, byte[] qualifier, CompareOp compareOp, ByteArrayComparable comparator,
+      byte[] family, byte[] qualifier, CompareOperator compareOp, ByteArrayComparable comparator,
       Put put, boolean result) throws IOException {
     ctPostCheckAndPut.incrementAndGet();
     return true;
   }

   @Override
   public boolean preCheckAndDelete(ObserverContext<RegionCoprocessorEnvironment> e, byte[] row,
-      byte[] family, byte[] qualifier, CompareOp compareOp, ByteArrayComparable comparator,
+      byte[] family, byte[] qualifier, CompareOperator compareOp, ByteArrayComparable comparator,
       Delete delete, boolean result) throws IOException {
     ctPreCheckAndDelete.incrementAndGet();
     return true;
   }

   @Override
   public boolean preCheckAndDeleteAfterRowLock(ObserverContext<RegionCoprocessorEnvironment> e,
-      byte[] row, byte[] family, byte[] qualifier, CompareOp compareOp,
+      byte[] row, byte[] family, byte[] qualifier, CompareOperator compareOp,
       ByteArrayComparable comparator, Delete delete, boolean result) throws IOException {
     ctPreCheckAndDeleteAfterRowLock.incrementAndGet();
     return true;
@@ -582,8 +580,8 @@ public class SimpleRegionObserver implements RegionObserver {
   @Override
   public boolean postCheckAndDelete(ObserverContext<RegionCoprocessorEnvironment> e, byte[] row,
-      byte[] family, byte[] qualifier, CompareOp compareOp, ByteArrayComparable comparator,
+      byte[] family, byte[] qualifier, CompareOperator compareOp, ByteArrayComparable comparator,
       Delete delete, boolean result) throws IOException {
     ctPostCheckAndDelete.incrementAndGet();
     return true;
   }
==== RegionAsTable.java ====
@@ -28,6 +28,7 @@ import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Durability;
@@ -224,6 +225,13 @@ public class RegionAsTable implements Table {
     throw new UnsupportedOperationException();
   }

+  @Override
+  public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier,
+      CompareOperator compareOp, byte[] value, Put put)
+  throws IOException {
+    throw new UnsupportedOperationException();
+  }
+
   @Override
   public void delete(Delete delete) throws IOException {
     this.region.delete(delete);
@@ -248,6 +256,13 @@ public class RegionAsTable implements Table {
     throw new UnsupportedOperationException();
   }

+  @Override
+  public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier,
+      CompareOperator compareOp, byte[] value, Delete delete)
+  throws IOException {
+    throw new UnsupportedOperationException();
+  }
+
   @Override
   public void mutateRow(RowMutations rm) throws IOException {
     throw new UnsupportedOperationException();
@@ -324,6 +339,14 @@ public class RegionAsTable implements Table {
     throw new UnsupportedOperationException();
   }

+  @Override
+  public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier,
+      CompareOperator compareOp,
+      byte[] value, RowMutations mutation)
+  throws IOException {
+    throw new UnsupportedOperationException();
+  }
+
   @Override
   public void setOperationTimeout(int operationTimeout) {
     throw new UnsupportedOperationException();
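RegionAsTable now carries both overload families, which is the pattern every Table implementation outside this patch has to follow. Implementations that want a single code path can bridge the legacy enum onto the new one; a minimal sketch, assuming the two enums keep identical constant names (they mirror each other in this patch), with a hypothetical helper class name:

import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.filter.CompareFilter;

public final class CompareOpBridge {
  private CompareOpBridge() {}

  // Valid exactly as long as the constant names stay aligned:
  // LESS, LESS_OR_EQUAL, EQUAL, NOT_EQUAL, GREATER_OR_EQUAL, GREATER, NO_OP.
  public static CompareOperator toCompareOperator(CompareFilter.CompareOp op) {
    return CompareOperator.valueOf(op.name());
  }
}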
==== TestAtomicOperation.java ====
@@ -16,21 +16,6 @@
  * limitations under the License.
  */
 package org.apache.hadoop.hbase.regionserver;
-import static org.apache.hadoop.hbase.HBaseTestingUtility.fam1;
-import static org.apache.hadoop.hbase.HBaseTestingUtility.fam2;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Random;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.atomic.AtomicLong;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -39,6 +24,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
@@ -61,7 +47,6 @@ import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.filter.BinaryComparator;
-import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.io.hfile.BlockCache;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
@@ -75,6 +60,21 @@ import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.TestName;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Random;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicLong;
+
+import static org.apache.hadoop.hbase.HBaseTestingUtility.fam1;
+import static org.apache.hadoop.hbase.HBaseTestingUtility.fam2;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
 /**
  * Testing of HRegion.incrementColumnValue, HRegion.increment,
  * and HRegion.append
@@ -653,7 +653,7 @@ public class TestAtomicOperation {
       }
       testStep = TestStep.CHECKANDPUT_STARTED;
       region.checkAndMutate(Bytes.toBytes("r1"), Bytes.toBytes(family), Bytes.toBytes("q1"),
-        CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes("10")), put, true);
+        CompareOperator.EQUAL, new BinaryComparator(Bytes.toBytes("10")), put, true);
       testStep = TestStep.CHECKANDPUT_COMPLETED;
     }
   }
==== TestHRegion.java ====
@@ -18,54 +18,6 @@
  */
 package org.apache.hadoop.hbase.regionserver;
-import static org.apache.hadoop.hbase.HBaseTestingUtility.COLUMNS;
-import static org.apache.hadoop.hbase.HBaseTestingUtility.fam1;
-import static org.apache.hadoop.hbase.HBaseTestingUtility.fam2;
-import static org.apache.hadoop.hbase.HBaseTestingUtility.fam3;
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-import static org.mockito.Matchers.any;
-import static org.mockito.Matchers.anyBoolean;
-import static org.mockito.Matchers.anyLong;
-import static org.mockito.Mockito.doThrow;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.never;
-import static org.mockito.Mockito.spy;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
-import java.io.IOException;
-import java.io.InterruptedIOException;
-import java.security.PrivilegedExceptionAction;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.NavigableMap;
-import java.util.TreeMap;
-import java.util.UUID;
-import java.util.concurrent.Callable;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.atomic.AtomicReference;
 import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -78,6 +30,7 @@ import org.apache.hadoop.hbase.ArrayBackedTag;
 import org.apache.hadoop.hbase.CategoryBasedTimeout;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
 import org.apache.hadoop.hbase.DroppedSnapshotException;
 import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -140,6 +93,8 @@ import org.apache.hadoop.hbase.regionserver.wal.WALActionsListener;
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
 import org.apache.hadoop.hbase.regionserver.wal.WALUtil;
 import org.apache.hadoop.hbase.security.User;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
+import org.apache.hadoop.hbase.shaded.com.google.common.collect.Maps;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.CompactionDescriptor;
@@ -181,6 +136,51 @@ import org.mockito.Mockito;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
+
+import java.io.IOException;
+import java.io.InterruptedIOException;
+import java.security.PrivilegedExceptionAction;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.NavigableMap;
+import java.util.TreeMap;
+import java.util.UUID;
+import java.util.concurrent.Callable;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicReference;
+
+import static org.apache.hadoop.hbase.HBaseTestingUtility.COLUMNS;
+import static org.apache.hadoop.hbase.HBaseTestingUtility.fam1;
+import static org.apache.hadoop.hbase.HBaseTestingUtility.fam2;
+import static org.apache.hadoop.hbase.HBaseTestingUtility.fam3;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.anyBoolean;
+import static org.mockito.Matchers.anyLong;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
 /**
  * Basic stand-alone testing of HRegion. No clusters!
  *
@@ -1713,7 +1713,7 @@ public class TestHRegion {
       put.addColumn(fam1, qf1, emptyVal);

       // checkAndPut with empty value
-      boolean res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BinaryComparator(
+      boolean res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new BinaryComparator(
           emptyVal), put, true);
       assertTrue(res);
@@ -1722,25 +1722,25 @@ public class TestHRegion {
       put.addColumn(fam1, qf1, val1);

       // checkAndPut with correct value
-      res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BinaryComparator(emptyVal),
+      res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new BinaryComparator(emptyVal),
          put, true);
       assertTrue(res);
       // not empty anymore
-      res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BinaryComparator(emptyVal),
+      res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new BinaryComparator(emptyVal),
          put, true);
       assertFalse(res);

       Delete delete = new Delete(row1);
       delete.addColumn(fam1, qf1);
-      res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BinaryComparator(emptyVal),
+      res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new BinaryComparator(emptyVal),
          delete, true);
       assertFalse(res);

       put = new Put(row1);
       put.addColumn(fam1, qf1, val2);
       // checkAndPut with correct value
-      res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BinaryComparator(val1),
+      res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new BinaryComparator(val1),
          put, true);
       assertTrue(res);
@@ -1748,12 +1748,12 @@ public class TestHRegion {
       delete = new Delete(row1);
       delete.addColumn(fam1, qf1);
       delete.addColumn(fam1, qf1);
-      res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BinaryComparator(val2),
+      res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new BinaryComparator(val2),
          delete, true);
       assertTrue(res);

       delete = new Delete(row1);
-      res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BinaryComparator(emptyVal),
+      res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new BinaryComparator(emptyVal),
          delete, true);
       assertTrue(res);
@@ -1762,7 +1762,7 @@ public class TestHRegion {
       put.addColumn(fam1, qf1, val1);
       res = region
-          .checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new NullComparator(), put, true);
+          .checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new NullComparator(), put, true);
       assertTrue(res);
     } finally {
       HBaseTestingUtility.closeRegionAndWAL(this.region);
@@ -1787,14 +1787,14 @@ public class TestHRegion {
       region.put(put);

       // checkAndPut with wrong value
-      boolean res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BinaryComparator(
+      boolean res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new BinaryComparator(
           val2), put, true);
       assertEquals(false, res);

       // checkAndDelete with wrong value
       Delete delete = new Delete(row1);
       delete.addFamily(fam1);
-      res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BinaryComparator(val2),
+      res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new BinaryComparator(val2),
          put, true);
       assertEquals(false, res);
     } finally {
@@ -1819,14 +1819,14 @@ public class TestHRegion {
       region.put(put);

       // checkAndPut with correct value
-      boolean res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BinaryComparator(
+      boolean res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new BinaryComparator(
           val1), put, true);
       assertEquals(true, res);

       // checkAndDelete with correct value
       Delete delete = new Delete(row1);
       delete.addColumn(fam1, qf1);
-      res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BinaryComparator(val1),
+      res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new BinaryComparator(val1),
          delete, true);
       assertEquals(true, res);
     } finally {
@@ -1854,12 +1854,12 @@ public class TestHRegion {
       region.put(put);

       // Test CompareOp.LESS: original = val3, compare with val3, fail
-      boolean res = region.checkAndMutate(row1, fam1, qf1, CompareOp.LESS,
+      boolean res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.LESS,
          new BinaryComparator(val3), put, true);
       assertEquals(false, res);

       // Test CompareOp.LESS: original = val3, compare with val4, fail
-      res = region.checkAndMutate(row1, fam1, qf1, CompareOp.LESS,
+      res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.LESS,
          new BinaryComparator(val4), put, true);
       assertEquals(false, res);
@@ -1867,18 +1867,18 @@ public class TestHRegion {
       // succeed (now value = val2)
       put = new Put(row1);
       put.addColumn(fam1, qf1, val2);
-      res = region.checkAndMutate(row1, fam1, qf1, CompareOp.LESS,
+      res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.LESS,
          new BinaryComparator(val2), put, true);
       assertEquals(true, res);

       // Test CompareOp.LESS_OR_EQUAL: original = val2, compare with val3, fail
-      res = region.checkAndMutate(row1, fam1, qf1, CompareOp.LESS_OR_EQUAL,
+      res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.LESS_OR_EQUAL,
          new BinaryComparator(val3), put, true);
       assertEquals(false, res);

       // Test CompareOp.LESS_OR_EQUAL: original = val2, compare with val2,
       // succeed (value still = val2)
-      res = region.checkAndMutate(row1, fam1, qf1, CompareOp.LESS_OR_EQUAL,
+      res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.LESS_OR_EQUAL,
          new BinaryComparator(val2), put, true);
       assertEquals(true, res);
@@ -1886,17 +1886,17 @@ public class TestHRegion {
       // succeed (now value = val3)
       put = new Put(row1);
       put.addColumn(fam1, qf1, val3);
-      res = region.checkAndMutate(row1, fam1, qf1, CompareOp.LESS_OR_EQUAL,
+      res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.LESS_OR_EQUAL,
          new BinaryComparator(val1), put, true);
       assertEquals(true, res);

       // Test CompareOp.GREATER: original = val3, compare with val3, fail
-      res = region.checkAndMutate(row1, fam1, qf1, CompareOp.GREATER,
+      res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.GREATER,
          new BinaryComparator(val3), put, true);
       assertEquals(false, res);

       // Test CompareOp.GREATER: original = val3, compare with val2, fail
-      res = region.checkAndMutate(row1, fam1, qf1, CompareOp.GREATER,
+      res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.GREATER,
          new BinaryComparator(val2), put, true);
       assertEquals(false, res);
@@ -1904,23 +1904,23 @@ public class TestHRegion {
       // succeed (now value = val2)
       put = new Put(row1);
       put.addColumn(fam1, qf1, val2);
-      res = region.checkAndMutate(row1, fam1, qf1, CompareOp.GREATER,
+      res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.GREATER,
          new BinaryComparator(val4), put, true);
       assertEquals(true, res);

       // Test CompareOp.GREATER_OR_EQUAL: original = val2, compare with val1, fail
-      res = region.checkAndMutate(row1, fam1, qf1, CompareOp.GREATER_OR_EQUAL,
+      res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.GREATER_OR_EQUAL,
          new BinaryComparator(val1), put, true);
       assertEquals(false, res);

       // Test CompareOp.GREATER_OR_EQUAL: original = val2, compare with val2,
       // succeed (value still = val2)
-      res = region.checkAndMutate(row1, fam1, qf1, CompareOp.GREATER_OR_EQUAL,
+      res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.GREATER_OR_EQUAL,
          new BinaryComparator(val2), put, true);
       assertEquals(true, res);

       // Test CompareOp.GREATER_OR_EQUAL: original = val2, compare with val3, succeed
-      res = region.checkAndMutate(row1, fam1, qf1, CompareOp.GREATER_OR_EQUAL,
+      res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.GREATER_OR_EQUAL,
          new BinaryComparator(val3), put, true);
       assertEquals(true, res);
     } finally {
@@ -1955,7 +1955,7 @@ public class TestHRegion {
       put.add(kv);

       // checkAndPut with wrong value
-      boolean res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BinaryComparator(
+      boolean res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new BinaryComparator(
           val1), put, true);
       assertEquals(true, res);
@@ -1982,7 +1982,7 @@ public class TestHRegion {
     Put put = new Put(row2);
     put.addColumn(fam1, qual1, value1);
     try {
-      region.checkAndMutate(row, fam1, qual1, CompareOp.EQUAL,
+      region.checkAndMutate(row, fam1, qual1, CompareOperator.EQUAL,
          new BinaryComparator(value2), put, false);
       fail();
     } catch (org.apache.hadoop.hbase.DoNotRetryIOException expected) {
@@ -2031,7 +2031,7 @@ public class TestHRegion {
       delete.addColumn(fam1, qf1);
       delete.addColumn(fam2, qf1);
       delete.addColumn(fam1, qf3);
-      boolean res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BinaryComparator(
+      boolean res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new BinaryComparator(
           val2), delete, true);
       assertEquals(true, res);
@@ -2047,7 +2047,7 @@ public class TestHRegion {
       // Family delete
       delete = new Delete(row1);
       delete.addFamily(fam2);
-      res = region.checkAndMutate(row1, fam2, qf1, CompareOp.EQUAL, new BinaryComparator(emptyVal),
+      res = region.checkAndMutate(row1, fam2, qf1, CompareOperator.EQUAL, new BinaryComparator(emptyVal),
          delete, true);
       assertEquals(true, res);
@@ -2058,7 +2058,7 @@ public class TestHRegion {
       // Row delete
       delete = new Delete(row1);
-      res = region.checkAndMutate(row1, fam1, qf1, CompareOp.EQUAL, new BinaryComparator(val1),
+      res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new BinaryComparator(val1),
          delete, true);
       assertEquals(true, res);
       get = new Get(row1);
@@ -6279,7 +6279,7 @@ public class TestHRegion {
     p = new Put(row);
     p.setDurability(Durability.SKIP_WAL);
     p.addColumn(fam1, qual1, qual2);
-    region.checkAndMutate(row, fam1, qual1, CompareOp.EQUAL, new BinaryComparator(qual1), p, false);
+    region.checkAndMutate(row, fam1, qual1, CompareOperator.EQUAL, new BinaryComparator(qual1), p, false);
     result = region.get(new Get(row));
     c = result.getColumnLatestCell(fam1, qual1);
     assertEquals(c.getTimestamp(), 10L);
@@ -6373,7 +6373,7 @@ public class TestHRegion {
     p.addColumn(fam1, qual1, qual2);
     RowMutations rm = new RowMutations(row);
     rm.add(p);
-    assertTrue(region.checkAndRowMutate(row, fam1, qual1, CompareOp.EQUAL,
+    assertTrue(region.checkAndRowMutate(row, fam1, qual1, CompareOperator.EQUAL,
        new BinaryComparator(qual1), rm, false));
     result = region.get(new Get(row));
     c = result.getColumnLatestCell(fam1, qual1);
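The LESS/GREATER assertions above also document the guard's direction, which is easy to get backwards: as the tests read, the mutation proceeds when comparing the value supplied to the check against the stored cell satisfies the operator. The following is a self-contained paraphrase of that rule with illustrative values (val2 < val3 < val4 lexicographically); it mirrors the test expectations above, not the server's internal code path, and the class and method names are hypothetical.

import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.util.Bytes;

public class CompareOperatorSemanticsDemo {
  // Guard as exercised by TestHRegion above: supplied-vs-stored, matched against zero.
  static boolean matches(CompareOperator op, byte[] supplied, byte[] stored) {
    int cmp = Bytes.compareTo(supplied, stored);
    switch (op) {
      case LESS:             return cmp < 0;
      case LESS_OR_EQUAL:    return cmp <= 0;
      case EQUAL:            return cmp == 0;
      case NOT_EQUAL:        return cmp != 0;
      case GREATER_OR_EQUAL: return cmp >= 0;
      case GREATER:          return cmp > 0;
      default:               return false; // NO_OP never matches
    }
  }

  public static void main(String[] args) {
    byte[] val2 = Bytes.toBytes("value2");
    byte[] val3 = Bytes.toBytes("value3");
    byte[] val4 = Bytes.toBytes("value4");
    // Same outcomes the LESS assertions above expect against a stored val3:
    System.out.println(matches(CompareOperator.LESS, val2, val3)); // true
    System.out.println(matches(CompareOperator.LESS, val3, val3)); // false
    System.out.println(matches(CompareOperator.LESS, val4, val3)); // false
  }
}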
==== TestWithDisabledAuthorization.java ====
@@ -35,6 +35,7 @@ import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Append;
@@ -55,7 +56,6 @@ import org.apache.hadoop.hbase.coprocessor.ObserverContext;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment;
 import org.apache.hadoop.hbase.filter.BinaryComparator;
-import org.apache.hadoop.hbase.filter.CompareFilter;
 import org.apache.hadoop.hbase.master.MasterCoprocessorHost;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.Quotas;
 import org.apache.hadoop.hbase.regionserver.MiniBatchOperationInProgress;
@@ -941,7 +941,7 @@ public class TestWithDisabledAuthorization extends SecureTestUtil {
       @Override
       public Object run() throws Exception {
         ACCESS_CONTROLLER.preCheckAndPut(ObserverContext.createAndPrepare(RCP_ENV, null),
-          TEST_ROW, TEST_FAMILY, TEST_Q1, CompareFilter.CompareOp.EQUAL,
+          TEST_ROW, TEST_FAMILY, TEST_Q1, CompareOperator.EQUAL,
           new BinaryComparator("foo".getBytes()), new Put(TEST_ROW), true);
         return null;
       }
@@ -952,7 +952,7 @@ public class TestWithDisabledAuthorization extends SecureTestUtil {
       @Override
       public Object run() throws Exception {
         ACCESS_CONTROLLER.preCheckAndDelete(ObserverContext.createAndPrepare(RCP_ENV, null),
-          TEST_ROW, TEST_FAMILY, TEST_Q1, CompareFilter.CompareOp.EQUAL,
+          TEST_ROW, TEST_FAMILY, TEST_Q1, CompareOperator.EQUAL,
           new BinaryComparator("foo".getBytes()), new Delete(TEST_ROW), true);
         return null;
       }