From 1f0e43a27a2b5dc0dd4b6edce608a398d33b1807 Mon Sep 17 00:00:00 2001 From: Jan Hentschel Date: Sat, 13 Apr 2019 13:39:37 +0200 Subject: [PATCH] HBASE-22232 Removed deprecated methods in CompareFilter --- .../apache/hadoop/hbase/client/HTable.java | 23 ---- .../org/apache/hadoop/hbase/client/Table.java | 85 ------------ .../hadoop/hbase/filter/CompareFilter.java | 124 ------------------ .../hbase/filter/DependentColumnFilter.java | 25 +--- .../hadoop/hbase/filter/FamilyFilter.java | 16 --- .../hadoop/hbase/filter/ParseFilter.java | 30 +---- .../hadoop/hbase/filter/QualifierFilter.java | 15 --- .../apache/hadoop/hbase/filter/RowFilter.java | 15 --- .../SingleColumnValueExcludeFilter.java | 64 --------- .../hbase/filter/SingleColumnValueFilter.java | 75 +---------- .../hadoop/hbase/filter/ValueFilter.java | 14 -- .../hadoop/hbase/client/TestOperation.java | 4 +- .../hbase/rest/client/RemoteHTable.java | 23 ---- .../hadoop/hbase/rest/model/ScannerModel.java | 5 +- .../hbase/filter/TestFilterListOnMini.java | 5 +- .../hbase/regionserver/RegionAsTable.java | 25 ---- .../hbase/regionserver/TestHRegion.java | 13 +- .../regionserver/TestIsDeleteFailure.java | 4 +- .../regionserver/TestJoinedScanners.java | 3 +- .../TestRegionServerReadRequestMetrics.java | 8 +- .../regionserver/TestReversibleScanners.java | 8 +- .../regionserver/TestSCVFWithMiniCluster.java | 4 +- .../regionserver/TestWALEntrySinkFilter.java | 16 --- .../apache/hadoop/hbase/thrift2/hbase.thrift | 6 +- 24 files changed, 34 insertions(+), 576 deletions(-) diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java index e357a8c37e1..bef7a51926f 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java @@ -49,7 +49,6 @@ import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.coprocessor.Batch; import org.apache.hadoop.hbase.client.coprocessor.Batch.Callback; import org.apache.hadoop.hbase.filter.BinaryComparator; -import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; import org.apache.hadoop.hbase.io.TimeRange; import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; import org.apache.hadoop.hbase.ipc.RpcControllerFactory; @@ -667,13 +666,6 @@ public class HTable implements Table { return doCheckAndPut(row, family, qualifier, CompareOperator.EQUAL.name(), value, null, put); } - @Override - @Deprecated - public boolean checkAndPut(final byte [] row, final byte [] family, final byte [] qualifier, - final CompareOp compareOp, final byte [] value, final Put put) throws IOException { - return doCheckAndPut(row, family, qualifier, compareOp.name(), value, null, put); - } - @Override @Deprecated public boolean checkAndPut(final byte [] row, final byte [] family, final byte [] qualifier, @@ -711,13 +703,6 @@ public class HTable implements Table { delete); } - @Override - @Deprecated - public boolean checkAndDelete(final byte[] row, final byte[] family, final byte[] qualifier, - final CompareOp compareOp, final byte[] value, final Delete delete) throws IOException { - return doCheckAndDelete(row, family, qualifier, compareOp.name(), value, null, delete); - } - @Override @Deprecated public boolean checkAndDelete(final byte[] row, final byte[] family, final byte[] qualifier, @@ -816,14 +801,6 @@ public class HTable implements Table { return ((Result)results[0]).getExists(); } - @Override - @Deprecated - public boolean 
checkAndMutate(final byte [] row, final byte [] family, final byte [] qualifier, - final CompareOp compareOp, final byte [] value, final RowMutations rm) - throws IOException { - return doCheckAndMutate(row, family, qualifier, compareOp.name(), value, null, rm); - } - @Override @Deprecated public boolean checkAndMutate(final byte [] row, final byte [] family, final byte [] qualifier, diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java index 068f15dfa29..9268b13dfeb 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java @@ -35,7 +35,6 @@ import org.apache.hadoop.hbase.CompareOperator; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.coprocessor.Batch; -import org.apache.hadoop.hbase.filter.CompareFilter; import org.apache.hadoop.hbase.io.TimeRange; import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; import org.apache.hadoop.hbase.util.Bytes; @@ -305,35 +304,6 @@ public interface Table extends Closeable { return checkAndPut(row, family, qualifier, CompareOperator.EQUAL, value, put); } - /** - * Atomically checks if a row/family/qualifier value matches the expected - * value. If it does, it adds the put. If the passed value is null, the check - * is for the lack of column (ie: non-existence) - * - * The expected value argument of this call is on the left and the current - * value of the cell is on the right side of the comparison operator. - * - * Ie. eg. GREATER operator means expected value > existing <=> add the put. - * - * @param row to check - * @param family column family to check - * @param qualifier column qualifier to check - * @param compareOp comparison operator to use - * @param value the expected value - * @param put data to put if check succeeds - * @throws IOException e - * @return true if the new put was executed, false otherwise - * @deprecated Since 2.0.0. Will be removed in 3.0.0. Use {@link #checkAndMutate(byte[], byte[])} - */ - @Deprecated - default boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, - CompareFilter.CompareOp compareOp, byte[] value, Put put) throws IOException { - RowMutations mutations = new RowMutations(put.getRow(), 1); - mutations.add(put); - - return checkAndMutate(row, family, qualifier, compareOp, value, mutations); - } - /** * Atomically checks if a row/family/qualifier value matches the expected * value. If it does, it adds the put. If the passed value is null, the check @@ -421,35 +391,6 @@ public interface Table extends Closeable { return checkAndDelete(row, family, qualifier, CompareOperator.EQUAL, value, delete); } - /** - * Atomically checks if a row/family/qualifier value matches the expected - * value. If it does, it adds the delete. If the passed value is null, the - * check is for the lack of column (ie: non-existence) - * - * The expected value argument of this call is on the left and the current - * value of the cell is on the right side of the comparison operator. - * - * Ie. eg. GREATER operator means expected value > existing <=> add the delete. 
- * - * @param row to check - * @param family column family to check - * @param qualifier column qualifier to check - * @param compareOp comparison operator to use - * @param value the expected value - * @param delete data to delete if check succeeds - * @throws IOException e - * @return true if the new delete was executed, false otherwise - * @deprecated Since 2.0.0. Will be removed in 3.0.0. Use {@link #checkAndMutate(byte[], byte[])} - */ - @Deprecated - default boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, - CompareFilter.CompareOp compareOp, byte[] value, Delete delete) throws IOException { - RowMutations mutations = new RowMutations(delete.getRow(), 1); - mutations.add(delete); - - return checkAndMutate(row, family, qualifier, compareOp, value, mutations); - } - /** * Atomically checks if a row/family/qualifier value matches the expected * value. If it does, it adds the delete. If the passed value is null, the @@ -790,32 +731,6 @@ public interface Table extends Closeable { throw new NotImplementedException("Add an implementation!"); } - /** - * Atomically checks if a row/family/qualifier value matches the expected value. - * If it does, it performs the row mutations. If the passed value is null, the check - * is for the lack of column (ie: non-existence) - * - * The expected value argument of this call is on the left and the current - * value of the cell is on the right side of the comparison operator. - * - * Ie. eg. GREATER operator means expected value > existing <=> perform row mutations. - * - * @param row to check - * @param family column family to check - * @param qualifier column qualifier to check - * @param compareOp the comparison operator - * @param value the expected value - * @param mutation mutations to perform if check succeeds - * @throws IOException e - * @return true if the new put was executed, false otherwise - * @deprecated Since 2.0.0. Will be removed in 3.0.0. Use {@link #checkAndMutate(byte[], byte[])} - */ - @Deprecated - default boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier, - CompareFilter.CompareOp compareOp, byte[] value, RowMutations mutation) throws IOException { - throw new NotImplementedException("Add an implementation!"); - } - /** * Atomically checks if a row/family/qualifier value matches the expected value. * If it does, it performs the row mutations. If the passed value is null, the check diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java index 5f8346ef812..f6b63ec59e8 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java @@ -16,7 +16,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.apache.hadoop.hbase.filter; import java.io.IOException; @@ -55,46 +54,9 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType */ @InterfaceAudience.Public public abstract class CompareFilter extends FilterBase { - /** - * Comparison operators. For filters only! - * Use {@link CompareOperator} otherwise. - * It (intentionally) has at least the below enums with same names. - * @deprecated since 2.0.0. Will be removed in 3.0.0. Use {@link CompareOperator} instead. 
- */ - @Deprecated - @InterfaceAudience.Public - public enum CompareOp { - /** less than */ - LESS, - /** less than or equal to */ - LESS_OR_EQUAL, - /** equals */ - EQUAL, - /** not equal */ - NOT_EQUAL, - /** greater than or equal to */ - GREATER_OR_EQUAL, - /** greater than */ - GREATER, - /** no operation */ - NO_OP, - } - protected CompareOperator op; protected ByteArrayComparable comparator; - /** - * Constructor. - * @param compareOp the compare op for row matching - * @param comparator the comparator for row matching - * @deprecated Since 2.0.0. Will be removed in 3.0.0. Use other constructor. - */ - @Deprecated - public CompareFilter(final CompareOp compareOp, - final ByteArrayComparable comparator) { - this(CompareOperator.valueOf(compareOp.name()), comparator); - } - /** * Constructor. * @param op the compare op for row matching @@ -106,15 +68,6 @@ public abstract class CompareFilter extends FilterBase { this.comparator = comparator; } - /** - * @return operator - * @deprecated since 2.0.0. Will be removed in 3.0.0. Use {@link #getCompareOperator()} instead. - */ - @Deprecated - public CompareOp getOperator() { - return CompareOp.valueOf(op.name()); - } - public CompareOperator getCompareOperator() { return op; } @@ -132,20 +85,6 @@ public abstract class CompareFilter extends FilterBase { return false; } - /** - * @deprecated Since 2.0.0. Will be removed in 3.0.0. - * Use {@link #compareRow(CompareOperator, ByteArrayComparable, Cell)} - */ - @Deprecated - protected boolean compareRow(final CompareOp compareOp, final ByteArrayComparable comparator, - final Cell cell) { - if (compareOp == CompareOp.NO_OP) { - return true; - } - int compareResult = PrivateCellUtil.compareRow(cell, comparator); - return compare(compareOp, compareResult); - } - protected boolean compareRow(final CompareOperator op, final ByteArrayComparable comparator, final Cell cell) { if (op == CompareOperator.NO_OP) { @@ -155,20 +94,6 @@ public abstract class CompareFilter extends FilterBase { return compare(op, compareResult); } - /** - * @deprecated Since 2.0.0. Will be removed in 3.0.0. - * Use {@link #compareFamily(CompareOperator, ByteArrayComparable, Cell)} - */ - @Deprecated - protected boolean compareFamily(final CompareOp compareOp, final ByteArrayComparable comparator, - final Cell cell) { - if (compareOp == CompareOp.NO_OP) { - return true; - } - int compareResult = PrivateCellUtil.compareFamily(cell, comparator); - return compare(compareOp, compareResult); - } - protected boolean compareFamily(final CompareOperator op, final ByteArrayComparable comparator, final Cell cell) { if (op == CompareOperator.NO_OP) { @@ -178,21 +103,6 @@ public abstract class CompareFilter extends FilterBase { return compare(op, compareResult); } - /** - * @deprecated Since 2.0.0. Will be removed in 3.0.0. - * Use {@link #compareQualifier(CompareOperator, ByteArrayComparable, Cell)} - */ - @Deprecated - protected boolean compareQualifier(final CompareOp compareOp, - final ByteArrayComparable comparator, final Cell cell) { - // We do not call through to the non-deprecated method for perf reasons. - if (compareOp == CompareOp.NO_OP) { - return true; - } - int compareResult = PrivateCellUtil.compareQualifier(cell, comparator); - return compare(compareOp, compareResult); - } - protected boolean compareQualifier(final CompareOperator op, final ByteArrayComparable comparator, final Cell cell) { // We do not call through to the non-deprecated method for perf reasons. 
@@ -203,21 +113,6 @@ public abstract class CompareFilter extends FilterBase { return compare(op, compareResult); } - /** - * @deprecated Since 2.0.0. Will be removed in 3.0.0. - * Use {@link #compareValue(CompareOperator, ByteArrayComparable, Cell)} - */ - @Deprecated - protected boolean compareValue(final CompareOp compareOp, final ByteArrayComparable comparator, - final Cell cell) { - // We do not call through to the non-deprecated method for perf reasons. - if (compareOp == CompareOp.NO_OP) { - return true; - } - int compareResult = PrivateCellUtil.compareValue(cell, comparator); - return compare(compareOp, compareResult); - } - protected boolean compareValue(final CompareOperator op, final ByteArrayComparable comparator, final Cell cell) { if (op == CompareOperator.NO_OP) { @@ -227,25 +122,6 @@ public abstract class CompareFilter extends FilterBase { return compare(op, compareResult); } - static boolean compare(final CompareOp op, int compareResult) { - switch (op) { - case LESS: - return compareResult <= 0; - case LESS_OR_EQUAL: - return compareResult < 0; - case EQUAL: - return compareResult != 0; - case NOT_EQUAL: - return compareResult == 0; - case GREATER_OR_EQUAL: - return compareResult > 0; - case GREATER: - return compareResult >= 0; - default: - throw new RuntimeException("Unknown Compare op " + op.name()); - } - } - static boolean compare(final CompareOperator op, int compareResult) { switch (op) { case LESS: diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java index 4d7f681f0a7..857bfacdce6 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java @@ -54,29 +54,6 @@ public class DependentColumnFilter extends CompareFilter { protected boolean dropDependentColumn; protected Set stampSet = new HashSet<>(); - - /** - * Build a dependent column filter with value checking - * dependent column varies will be compared using the supplied - * compareOp and comparator, for usage of which - * refer to {@link CompareFilter} - * - * @param family dependent column family - * @param qualifier dependent column qualifier - * @param dropDependentColumn whether the column should be discarded after - * @param valueCompareOp comparison op - * @param valueComparator comparator - * @deprecated Since 2.0.0. Will be removed in 3.0.0. Use - * {@link #DependentColumnFilter(byte[], byte[], boolean, CompareOperator, ByteArrayComparable)} - * instead. 
- */ - @Deprecated - public DependentColumnFilter(final byte [] family, final byte[] qualifier, - final boolean dropDependentColumn, final CompareOp valueCompareOp, - final ByteArrayComparable valueComparator) { - this(family, qualifier, dropDependentColumn, CompareOperator.valueOf(valueCompareOp.name()), - valueComparator); - } /** * Build a dependent column filter with value checking @@ -123,7 +100,7 @@ public class DependentColumnFilter extends CompareFilter { */ public DependentColumnFilter(final byte [] family, final byte [] qualifier, final boolean dropDependentColumn) { - this(family, qualifier, dropDependentColumn, CompareOp.NO_OP, null); + this(family, qualifier, dropDependentColumn, CompareOperator.NO_OP, null); } /** diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java index fb544199c89..74c6d293bda 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java @@ -16,7 +16,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.apache.hadoop.hbase.filter; import java.io.IOException; @@ -46,21 +45,6 @@ import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferExce */ @InterfaceAudience.Public public class FamilyFilter extends CompareFilter { - - /** - * Constructor. - * - * @param familyCompareOp the compare op for column family matching - * @param familyComparator the comparator for column family matching - * @deprecated Since 2.0.0. Will be removed in 3.0.0. - * Use {@link #FamilyFilter(CompareOperator, ByteArrayComparable)} - */ - @Deprecated - public FamilyFilter(final CompareOp familyCompareOp, - final ByteArrayComparable familyComparator) { - super(familyCompareOp, familyComparator); - } - /** * Constructor. * diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java index 5428ed87441..1aeaa13f5a9 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java @@ -32,11 +32,10 @@ import java.util.Set; import java.util.Stack; import org.apache.hadoop.hbase.CompareOperator; +import org.apache.hadoop.hbase.util.Bytes; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; -import org.apache.hadoop.hbase.util.Bytes; /** * This class allows a user to specify a filter via a string @@ -793,33 +792,6 @@ public class ParseFilter { throw new IllegalArgumentException("Invalid compare operator"); } - /** - * Takes a compareOperator symbol as a byte array and returns the corresponding CompareOperator - * @deprecated Since 2.0 - *
<p>
- * @param compareOpAsByteArray the comparatorOperator symbol as a byte array - * @return the Compare Operator - * @deprecated Since 2.0.0. Will be removed in 3.0.0. Use {@link #createCompareOperator(byte [])} - */ - @Deprecated - public static CompareFilter.CompareOp createCompareOp (byte [] compareOpAsByteArray) { - ByteBuffer compareOp = ByteBuffer.wrap(compareOpAsByteArray); - if (compareOp.equals(ParseConstants.LESS_THAN_BUFFER)) - return CompareOp.LESS; - else if (compareOp.equals(ParseConstants.LESS_THAN_OR_EQUAL_TO_BUFFER)) - return CompareOp.LESS_OR_EQUAL; - else if (compareOp.equals(ParseConstants.GREATER_THAN_BUFFER)) - return CompareOp.GREATER; - else if (compareOp.equals(ParseConstants.GREATER_THAN_OR_EQUAL_TO_BUFFER)) - return CompareOp.GREATER_OR_EQUAL; - else if (compareOp.equals(ParseConstants.NOT_EQUAL_TO_BUFFER)) - return CompareOp.NOT_EQUAL; - else if (compareOp.equals(ParseConstants.EQUAL_TO_BUFFER)) - return CompareOp.EQUAL; - else - throw new IllegalArgumentException("Invalid compare operator"); - } - /** * Parses a comparator of the form comparatorType:comparatorValue form and returns a comparator *
<p>
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java index 9d1d8c75701..7b6167f0746 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java @@ -16,7 +16,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.apache.hadoop.hbase.filter; import java.io.IOException; @@ -46,20 +45,6 @@ import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferExce */ @InterfaceAudience.Public public class QualifierFilter extends CompareFilter { - - /** - * Constructor. - * @param op the compare op for column qualifier matching - * @param qualifierComparator the comparator for column qualifier matching - * @deprecated Since 2.0.0. Will be removed in 3.0.0. - * Use {@link #QualifierFilter(CompareOperator, ByteArrayComparable)} instead. - */ - @Deprecated - public QualifierFilter(final CompareOp op, - final ByteArrayComparable qualifierComparator) { - super(op, qualifierComparator); - } - /** * Constructor. * @param op the compare op for column qualifier matching diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java index 6fe32fb1749..fcda1e09ee8 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java @@ -16,7 +16,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.apache.hadoop.hbase.filter; import java.io.IOException; @@ -45,22 +44,8 @@ import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferExce */ @InterfaceAudience.Public public class RowFilter extends CompareFilter { - private boolean filterOutRow = false; - /** - * Constructor. - * @param rowCompareOp the compare op for row matching - * @param rowComparator the comparator for row matching - * @deprecated Since 2.0.0. Will remove in 3.0.0. Use - * {@link #RowFilter(CompareOperator, ByteArrayComparable)}} instead. - */ - @Deprecated - public RowFilter(final CompareOp rowCompareOp, - final ByteArrayComparable rowComparator) { - super(rowCompareOp, rowComparator); - } - /** * Constructor. * @param op the compare op for row matching diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java index 7b24b03961a..0c0d4f3e220 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java @@ -16,7 +16,6 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package org.apache.hadoop.hbase.filter; import java.io.IOException; @@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CompareOperator; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.exceptions.DeserializationException; -import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException; @@ -42,25 +40,6 @@ import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferExce */ @InterfaceAudience.Public public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter { - - /** - * Constructor for binary compare of the value of a single column. If the - * column is found and the condition passes, all columns of the row will be - * emitted; except for the tested column value. If the column is not found or - * the condition fails, the row will not be emitted. - * - * @param family name of column family - * @param qualifier name of column qualifier - * @param compareOp operator - * @param value value to compare column values against - * {@link #SingleColumnValueExcludeFilter(byte[], byte[], CompareOperator, byte[])} - */ - @Deprecated - public SingleColumnValueExcludeFilter(byte[] family, byte[] qualifier, - CompareOp compareOp, byte[] value) { - super(family, qualifier, compareOp, value); - } - /** * Constructor for binary compare of the value of a single column. If the * column is found and the condition passes, all columns of the row will be @@ -77,29 +56,6 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter { super(family, qualifier, op, value); } - /** - * Constructor for binary compare of the value of a single column. If the - * column is found and the condition passes, all columns of the row will be - * emitted; except for the tested column value. If the condition fails, the - * row will not be emitted. - *
<p>
- * Use the filterIfColumnMissing flag to set whether the rest of the columns - * in a row will be emitted if the specified column to check is not found in - * the row. - * - * @param family name of column family - * @param qualifier name of column qualifier - * @param compareOp operator - * @param comparator Comparator to use. - * @deprecated Since 2.0.0. Will be removed in 3.0.0. Use - * {@link #SingleColumnValueExcludeFilter(byte[], byte[], CompareOperator, ByteArrayComparable)} - */ - @Deprecated - public SingleColumnValueExcludeFilter(byte[] family, byte[] qualifier, - CompareOp compareOp, ByteArrayComparable comparator) { - super(family, qualifier, compareOp, comparator); - } - /** * Constructor for binary compare of the value of a single column. If the * column is found and the condition passes, all columns of the row will be @@ -120,26 +76,6 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter { super(family, qualifier, op, comparator); } - - /** - * Constructor for protobuf deserialization only. - * @param family - * @param qualifier - * @param compareOp - * @param comparator - * @param filterIfMissing - * @param latestVersionOnly - * @deprecated Since 2.0.0. Will be removed in 3.0.0. Use - * {@link #SingleColumnValueExcludeFilter(byte[], byte[], CompareOperator, ByteArrayComparable, boolean, boolean)} - */ - @Deprecated - protected SingleColumnValueExcludeFilter(final byte[] family, final byte[] qualifier, - final CompareOp compareOp, ByteArrayComparable comparator, final boolean filterIfMissing, - final boolean latestVersionOnly) { - this(family, qualifier, CompareOperator.valueOf(compareOp.name()), comparator, - filterIfMissing, latestVersionOnly); - } - /** * Constructor for protobuf deserialization only. * @param family diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java index b4303cdb76d..92422c9a853 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java @@ -16,7 +16,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.apache.hadoop.hbase.filter; import java.io.IOException; @@ -29,7 +28,6 @@ import org.apache.hadoop.hbase.CompareOperator; import org.apache.hadoop.hbase.PrivateCellUtil; import org.apache.yetus.audience.InterfaceAudience; import org.apache.hadoop.hbase.exceptions.DeserializationException; -import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException; import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; @@ -41,7 +39,7 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hbase.thirdparty.com.google.common.base.Preconditions; /** - * This filter is used to filter cells based on value. It takes a {@link CompareFilter.CompareOp} + * This filter is used to filter cells based on value. It takes a {@link CompareOperator} * operator (equal, greater, not equal, etc), and either a byte [] value or * a ByteArrayComparable. *
<p>
@@ -80,29 +78,6 @@ public class SingleColumnValueFilter extends FilterBase { protected boolean filterIfMissing = false; protected boolean latestVersionOnly = true; - /** - * Constructor for binary compare of the value of a single column. If the - * column is found and the condition passes, all columns of the row will be - * emitted. If the condition fails, the row will not be emitted. - *
<p>
- * Use the filterIfColumnMissing flag to set whether the rest of the columns - * in a row will be emitted if the specified column to check is not found in - * the row. - * - * @param family name of column family - * @param qualifier name of column qualifier - * @param compareOp operator - * @param value value to compare column values against - * @deprecated Since 2.0.0. Will be removed in 3.0.0. Use - * {@link #SingleColumnValueFilter(byte[], byte[], CompareOperator, byte[])} instead. - */ - @Deprecated - public SingleColumnValueFilter(final byte [] family, final byte [] qualifier, - final CompareOp compareOp, final byte[] value) { - this(family, qualifier, CompareOperator.valueOf(compareOp.name()), - new org.apache.hadoop.hbase.filter.BinaryComparator(value)); - } - /** * Constructor for binary compare of the value of a single column. If the * column is found and the condition passes, all columns of the row will be @@ -123,29 +98,6 @@ public class SingleColumnValueFilter extends FilterBase { new org.apache.hadoop.hbase.filter.BinaryComparator(value)); } - /** - * Constructor for binary compare of the value of a single column. If the - * column is found and the condition passes, all columns of the row will be - * emitted. If the condition fails, the row will not be emitted. - *
<p>
- * Use the filterIfColumnMissing flag to set whether the rest of the columns - * in a row will be emitted if the specified column to check is not found in - * the row. - * - * @param family name of column family - * @param qualifier name of column qualifier - * @param compareOp operator - * @param comparator Comparator to use. - * @deprecated Since 2.0.0. Will be removed in 3.0.0. Use - * {@link #SingleColumnValueFilter(byte[], byte[], CompareOperator, ByteArrayComparable)} instead. - */ - @Deprecated - public SingleColumnValueFilter(final byte [] family, final byte [] qualifier, - final CompareOp compareOp, - final org.apache.hadoop.hbase.filter.ByteArrayComparable comparator) { - this(family, qualifier, CompareOperator.valueOf(compareOp.name()), comparator); - } - /** * Constructor for binary compare of the value of a single column. If the * column is found and the condition passes, all columns of the row will be @@ -169,27 +121,6 @@ public class SingleColumnValueFilter extends FilterBase { this.comparator = comparator; } - /** - * Constructor for protobuf deserialization only. - * @param family - * @param qualifier - * @param compareOp - * @param comparator - * @param filterIfMissing - * @param latestVersionOnly - * @deprecated Since 2.0.0. Will be removed in 3.0.0. Use - * {@link #SingleColumnValueFilter(byte[], byte[], CompareOperator, ByteArrayComparable, - * boolean, boolean)} instead. - */ - @Deprecated - protected SingleColumnValueFilter(final byte[] family, final byte[] qualifier, - final CompareOp compareOp, org.apache.hadoop.hbase.filter.ByteArrayComparable comparator, - final boolean filterIfMissing, - final boolean latestVersionOnly) { - this(family, qualifier, CompareOperator.valueOf(compareOp.name()), comparator, filterIfMissing, - latestVersionOnly); - } - /** * Constructor for protobuf deserialization only. * @param family @@ -212,8 +143,8 @@ public class SingleColumnValueFilter extends FilterBase { * @deprecated since 2.0.0. Will be removed in 3.0.0. Use {@link #getCompareOperator()} instead. */ @Deprecated - public CompareOp getOperator() { - return CompareOp.valueOf(op.name()); + public CompareOperator getOperator() { + return CompareOperator.valueOf(op.name()); } public CompareOperator getCompareOperator() { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java index 7e958f03a29..38fe45160e7 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java @@ -16,7 +16,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.apache.hadoop.hbase.filter; import java.io.IOException; @@ -45,19 +44,6 @@ import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferExce */ @InterfaceAudience.Public public class ValueFilter extends CompareFilter { - - /** - * Constructor. - * @param valueCompareOp the compare op for value matching - * @param valueComparator the comparator for value matching - * @deprecated Since 2.0.0. Will be removed in 3.0.0. - * Use {@link #ValueFilter(CompareOperator, ByteArrayComparable)} - */ - public ValueFilter(final CompareOp valueCompareOp, - final ByteArrayComparable valueComparator) { - super(valueCompareOp, valueComparator); - } - /** * Constructor. 
* @param valueCompareOp the compare op for value matching diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java index 05596f44549..cc48fd07e36 100644 --- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java +++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestOperation.java @@ -29,6 +29,7 @@ import java.util.Map; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellUtil; +import org.apache.hadoop.hbase.CompareOperator; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValue; @@ -37,7 +38,6 @@ import org.apache.hadoop.hbase.filter.ColumnCountGetFilter; import org.apache.hadoop.hbase.filter.ColumnPaginationFilter; import org.apache.hadoop.hbase.filter.ColumnPrefixFilter; import org.apache.hadoop.hbase.filter.ColumnRangeFilter; -import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; import org.apache.hadoop.hbase.filter.DependentColumnFilter; import org.apache.hadoop.hbase.filter.FamilyFilter; import org.apache.hadoop.hbase.filter.Filter; @@ -161,7 +161,7 @@ public class TestOperation { private static String STR_FIRST_KEY_ONLY_FILTER = FIRST_KEY_ONLY_FILTER.getClass().getSimpleName(); - private static CompareOp CMP_OP = CompareOp.EQUAL; + private static CompareOperator CMP_OP = CompareOperator.EQUAL; private static byte[] CMP_VALUE = Bytes.toBytes("value"); private static BinaryComparator BC = new BinaryComparator(CMP_VALUE); private static DependentColumnFilter DC_FILTER = diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java index 0ece796051a..bdb383856ef 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/RemoteHTable.java @@ -16,7 +16,6 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package org.apache.hadoop.hbase.rest.client; import com.google.protobuf.Descriptors; @@ -54,7 +53,6 @@ import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.client.coprocessor.Batch; import org.apache.hadoop.hbase.client.coprocessor.Batch.Callback; import org.apache.hadoop.hbase.client.metrics.ScanMetrics; -import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; import org.apache.hadoop.hbase.io.TimeRange; import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; import org.apache.hadoop.hbase.rest.Constants; @@ -716,13 +714,6 @@ public class RemoteHTable implements Table { throw new IOException("checkAndPut request timed out"); } - @Override - @Deprecated - public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, - CompareOp compareOp, byte[] value, Put put) throws IOException { - throw new IOException("checkAndPut for non-equal comparison not implemented"); - } - @Override @Deprecated public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, @@ -773,13 +764,6 @@ public class RemoteHTable implements Table { throw new IOException("checkAndDelete request timed out"); } - @Override - @Deprecated - public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, - CompareOp compareOp, byte[] value, Delete delete) throws IOException { - throw new IOException("checkAndDelete for non-equal comparison not implemented"); - } - @Override @Deprecated public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, @@ -792,13 +776,6 @@ public class RemoteHTable implements Table { return new CheckAndMutateBuilderImpl(row, family); } - @Override - @Deprecated - public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier, - CompareOp compareOp, byte[] value, RowMutations rm) throws IOException { - throw new UnsupportedOperationException("checkAndMutate not implemented"); - } - @Override @Deprecated public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier, diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java index 7558e8fc120..831bb3a630b 100644 --- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java +++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/ScannerModel.java @@ -16,7 +16,6 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package org.apache.hadoop.hbase.rest.model; import java.io.IOException; @@ -282,7 +281,7 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable { if (qualifier != null) { this.qualifier = Bytes.toString(Base64.getEncoder().encode(qualifier)); } - this.op = dcf.getOperator().toString(); + this.op = dcf.getCompareOperator().toString(); this.comparator = new ByteArrayComparableModel(dcf.getComparator()); this.dropDependentColumn = dcf.dropDependentColumn(); } break; @@ -324,7 +323,7 @@ public class ScannerModel implements ProtobufMessageHandler, Serializable { case QualifierFilter: case RowFilter: case ValueFilter: - this.op = ((CompareFilter)filter).getOperator().toString(); + this.op = ((CompareFilter)filter).getCompareOperator().toString(); this.comparator = new ByteArrayComparableModel( ((CompareFilter)filter).getComparator()); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java index 3b52db6a89b..6b6428c3f38 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java @@ -17,6 +17,7 @@ */ package org.apache.hadoop.hbase.filter; +import org.apache.hadoop.hbase.CompareOperator; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.TableName; @@ -80,9 +81,9 @@ public class TestFilterListOnMini { put2.addColumn(CF2, Bytes.toBytes("col_b"), Bytes.toBytes(0)); table.put(put2); FamilyFilter filterCF1 = - new FamilyFilter(CompareFilter.CompareOp.EQUAL, new BinaryComparator(CF1)); + new FamilyFilter(CompareOperator.EQUAL, new BinaryComparator(CF1)); FamilyFilter filterCF2 = - new FamilyFilter(CompareFilter.CompareOp.EQUAL, new BinaryComparator(CF2)); + new FamilyFilter(CompareOperator.EQUAL, new BinaryComparator(CF2)); FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ONE); filterList.addFilter(filterCF1); filterList.addFilter(filterCF2); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/RegionAsTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/RegionAsTable.java index 0cda421cdc4..6a520d1145c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/RegionAsTable.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/RegionAsTable.java @@ -49,7 +49,6 @@ import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.client.coprocessor.Batch.Call; import org.apache.hadoop.hbase.client.coprocessor.Batch.Callback; import org.apache.hadoop.hbase.client.metrics.ScanMetrics; -import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; /** @@ -219,14 +218,6 @@ public class RegionAsTable implements Table { throw new UnsupportedOperationException(); } - @Override - @Deprecated - public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, CompareOp compareOp, - byte[] value, Put put) - throws IOException { - throw new UnsupportedOperationException(); - } - @Override @Deprecated public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, @@ -252,14 +243,6 @@ public class RegionAsTable implements Table { throw new UnsupportedOperationException(); } - @Override - @Deprecated - public boolean checkAndDelete(byte[] row, byte[] family, 
byte[] qualifier, - CompareOp compareOp, byte[] value, Delete delete) - throws IOException { - throw new UnsupportedOperationException(); - } - @Override @Deprecated public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, @@ -342,14 +325,6 @@ public class RegionAsTable implements Table { throw new UnsupportedOperationException(); } - @Override - @Deprecated - public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier, CompareOp compareOp, - byte[] value, RowMutations mutation) - throws IOException { - throw new UnsupportedOperationException(); - } - @Override @Deprecated public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier, diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java index 6af4bb14aea..a2664ce6bda 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java @@ -118,7 +118,6 @@ import org.apache.hadoop.hbase.exceptions.FailedSanityCheckException; import org.apache.hadoop.hbase.filter.BigDecimalComparator; import org.apache.hadoop.hbase.filter.BinaryComparator; import org.apache.hadoop.hbase.filter.ColumnCountGetFilter; -import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.filter.FilterBase; import org.apache.hadoop.hbase.filter.FilterList; @@ -1451,7 +1450,7 @@ public class TestHRegion { allFilters.addFilter(new PrefixFilter(Bytes.toBytes(keyPrefix))); // Only return rows where this column value exists in the row. SingleColumnValueFilter filter = new SingleColumnValueFilter(Bytes.toBytes("trans-tags"), - Bytes.toBytes("qual2"), CompareOp.EQUAL, Bytes.toBytes(value)); + Bytes.toBytes("qual2"), CompareOperator.EQUAL, Bytes.toBytes(value)); filter.setFilterIfMissing(true); allFilters.addFilter(filter); Scan scan = new Scan(); @@ -2638,7 +2637,7 @@ public class TestHRegion { // Get 3 versions, the oldest version has gone from user view assertEquals(maxVersions, res.size()); - get.setFilter(new ValueFilter(CompareOp.EQUAL, new SubstringComparator("value"))); + get.setFilter(new ValueFilter(CompareOperator.EQUAL, new SubstringComparator("value"))); res = region.get(get); // When use value filter, the oldest version should still gone from user view and it // should only return one key vaule @@ -3282,7 +3281,7 @@ public class TestHRegion { Scan scan = new Scan(); Filter filter = new SingleColumnValueExcludeFilter(cf_essential, col_normal, - CompareOp.NOT_EQUAL, filtered_val); + CompareOperator.NOT_EQUAL, filtered_val); scan.setFilter(filter); scan.setLoadColumnFamiliesOnDemand(true); InternalScanner s = region.getScanner(scan); @@ -3442,7 +3441,7 @@ public class TestHRegion { Scan scan = new Scan(); scan.addFamily(family); - scan.setFilter(new SingleColumnValueFilter(family, qual1, CompareOp.EQUAL, + scan.setFilter(new SingleColumnValueFilter(family, qual1, CompareOperator.EQUAL, new BinaryComparator(Bytes.toBytes(5L)))); int expectedCount = 0; @@ -3881,9 +3880,9 @@ public class TestHRegion { Scan idxScan = new Scan(); idxScan.addFamily(family); idxScan.setFilter(new FilterList(FilterList.Operator.MUST_PASS_ALL, Arrays. 
asList( - new SingleColumnValueFilter(family, qual1, CompareOp.GREATER_OR_EQUAL, + new SingleColumnValueFilter(family, qual1, CompareOperator.GREATER_OR_EQUAL, new BinaryComparator(Bytes.toBytes(0L))), new SingleColumnValueFilter(family, qual1, - CompareOp.LESS_OR_EQUAL, new BinaryComparator(Bytes.toBytes(3L)))))); + CompareOperator.LESS_OR_EQUAL, new BinaryComparator(Bytes.toBytes(3L)))))); InternalScanner scanner = region.getScanner(idxScan); List res = new ArrayList<>(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestIsDeleteFailure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestIsDeleteFailure.java index 1198867bf2f..84203589c55 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestIsDeleteFailure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestIsDeleteFailure.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hbase.regionserver; import java.util.ArrayList; import java.util.List; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hbase.CompareOperator; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HTableDescriptor; @@ -29,7 +30,6 @@ import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.filter.BinaryComparator; -import org.apache.hadoop.hbase.filter.CompareFilter; import org.apache.hadoop.hbase.filter.SingleColumnValueFilter; import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; @@ -148,7 +148,7 @@ public class TestIsDeleteFailure { scan.addColumn(family, c9); scan.addColumn(family, c15); SingleColumnValueFilter filter = - new SingleColumnValueFilter(family, c15, CompareFilter.CompareOp.EQUAL, + new SingleColumnValueFilter(family, c15, CompareOperator.EQUAL, new BinaryComparator(c15)); scan.setFilter(filter); //Trigger the scan for not existing row, so it will scan over all rows diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java index 3aa35e1cc52..a34daa6b1aa 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestJoinedScanners.java @@ -40,7 +40,6 @@ import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.client.TableDescriptorBuilder; -import org.apache.hadoop.hbase.filter.CompareFilter; import org.apache.hadoop.hbase.filter.SingleColumnValueFilter; import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; import org.apache.hadoop.hbase.testclassification.LargeTests; @@ -173,7 +172,7 @@ public class TestJoinedScanners { scan.addColumn(cf_joined, col_name); SingleColumnValueFilter filter = new SingleColumnValueFilter( - cf_essential, col_name, CompareFilter.CompareOp.EQUAL, flag_yes); + cf_essential, col_name, CompareOperator.EQUAL, flag_yes); filter.setFilterIfMissing(true); scan.setFilter(filter); scan.setLoadColumnFamiliesOnDemand(!slow); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReadRequestMetrics.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReadRequestMetrics.java index 116f2de053a..f0a02faf138 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReadRequestMetrics.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerReadRequestMetrics.java @@ -30,6 +30,7 @@ import java.util.Map; import java.util.Optional; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.ClusterMetrics.Option; +import org.apache.hadoop.hbase.CompareOperator; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.RegionLoad; @@ -55,7 +56,6 @@ import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor; import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; import org.apache.hadoop.hbase.coprocessor.RegionObserver; import org.apache.hadoop.hbase.filter.BinaryComparator; -import org.apache.hadoop.hbase.filter.CompareFilter; import org.apache.hadoop.hbase.filter.RowFilter; import org.apache.hadoop.hbase.filter.SingleColumnValueFilter; import org.apache.hadoop.hbase.master.LoadBalancer; @@ -340,7 +340,7 @@ public class TestRegionServerReadRequestMetrics { // test for scan scan = new Scan(); - scan.setFilter(new SingleColumnValueFilter(CF1, COL1, CompareFilter.CompareOp.EQUAL, VAL1)); + scan.setFilter(new SingleColumnValueFilter(CF1, COL1, CompareOperator.EQUAL, VAL1)); try (ResultScanner scanner = table.getScanner(scan)) { resultCount = 0; for (Result ignore : scanner) { @@ -351,7 +351,7 @@ public class TestRegionServerReadRequestMetrics { // test for scan scan = new Scan(); - scan.setFilter(new RowFilter(CompareFilter.CompareOp.EQUAL, new BinaryComparator(ROW1))); + scan.setFilter(new RowFilter(CompareOperator.EQUAL, new BinaryComparator(ROW1))); try (ResultScanner scanner = table.getScanner(scan)) { resultCount = 0; for (Result ignore : scanner) { @@ -362,7 +362,7 @@ public class TestRegionServerReadRequestMetrics { // test for scan scan = new Scan(ROW2, ROW3); - scan.setFilter(new RowFilter(CompareFilter.CompareOp.EQUAL, new BinaryComparator(ROW1))); + scan.setFilter(new RowFilter(CompareOperator.EQUAL, new BinaryComparator(ROW1))); try (ResultScanner scanner = table.getScanner(scan)) { resultCount = 0; for (Result ignore : scanner) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java index 66e18472ea0..93c32911c76 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestReversibleScanners.java @@ -31,6 +31,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparatorImpl; +import org.apache.hadoop.hbase.CompareOperator; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -43,7 +44,6 @@ import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Scan; -import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.filter.FilterList; import 
org.apache.hadoop.hbase.filter.FilterList.Operator; @@ -383,7 +383,7 @@ public class TestReversibleScanners { // Case8: Case7 + SingleColumnValueFilter int valueNum = startRowNum % VALUESIZE; Filter filter = new SingleColumnValueFilter(FAMILYNAME, - specifiedQualifiers[0], CompareOp.EQUAL, VALUES[valueNum]); + specifiedQualifiers[0], CompareOperator.EQUAL, VALUES[valueNum]); scan.setFilter(filter); scanner = region.getScanner(scan); int unfilteredRowNum = (startRowNum - stopRowNum) / VALUESIZE @@ -401,9 +401,9 @@ public class TestReversibleScanners { // Case10: Case7 + FilterList+MUST_PASS_ONE SingleColumnValueFilter scvFilter1 = new SingleColumnValueFilter( - FAMILYNAME, specifiedQualifiers[0], CompareOp.EQUAL, VALUES[0]); + FAMILYNAME, specifiedQualifiers[0], CompareOperator.EQUAL, VALUES[0]); SingleColumnValueFilter scvFilter2 = new SingleColumnValueFilter( - FAMILYNAME, specifiedQualifiers[0], CompareOp.EQUAL, VALUES[1]); + FAMILYNAME, specifiedQualifiers[0], CompareOperator.EQUAL, VALUES[1]); expectedRowNum = 0; for (int i = startRowNum; i > stopRowNum; i--) { if (i % VALUESIZE == 0 || i % VALUESIZE == 1) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSCVFWithMiniCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSCVFWithMiniCluster.java index 5b78be2e02f..a1d908586e9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSCVFWithMiniCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSCVFWithMiniCluster.java @@ -23,6 +23,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Iterator; import java.util.List; +import org.apache.hadoop.hbase.CompareOperator; import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -38,7 +39,6 @@ import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.filter.BinaryComparator; -import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp; import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.filter.SingleColumnValueFilter; import org.apache.hadoop.hbase.io.compress.Compression.Algorithm; @@ -122,7 +122,7 @@ public class TestSCVFWithMiniCluster { * We want to filter out from the scan all rows that do not have the column 'a:foo' with value * 'false'. Only row with key '1' should be returned in the scan. 
*/ - scanFilter = new SingleColumnValueFilter(FAMILY_A, QUALIFIER_FOO, CompareOp.EQUAL, + scanFilter = new SingleColumnValueFilter(FAMILY_A, QUALIFIER_FOO, CompareOperator.EQUAL, new BinaryComparator(Bytes.toBytes("false"))); ((SingleColumnValueFilter) scanFilter).setFilterIfMissing(true); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestWALEntrySinkFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestWALEntrySinkFilter.java index 479b4f3eb64..ff46a985568 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestWALEntrySinkFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/regionserver/TestWALEntrySinkFilter.java @@ -61,7 +61,6 @@ import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.client.TableBuilder; import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.client.coprocessor.Batch; -import org.apache.hadoop.hbase.filter.CompareFilter; import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.testclassification.ReplicationTests; @@ -378,11 +377,6 @@ public class TestWALEntrySinkFilter { return false; } - @Override - public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, CompareFilter.CompareOp compareOp, byte[] value, Put put) throws IOException { - return false; - } - @Override public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, CompareOperator op, byte[] value, Put put) throws IOException { return false; @@ -403,11 +397,6 @@ public class TestWALEntrySinkFilter { return false; } - @Override - public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, CompareFilter.CompareOp compareOp, byte[] value, Delete delete) throws IOException { - return false; - } - @Override public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, CompareOperator op, byte[] value, Delete delete) throws IOException { return false; @@ -473,11 +462,6 @@ public class TestWALEntrySinkFilter { } - @Override - public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier, CompareFilter.CompareOp compareOp, byte[] value, RowMutations mutation) throws IOException { - return false; - } - @Override public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier, CompareOperator op, byte[] value, RowMutations mutation) throws IOException { return false; diff --git a/hbase-thrift/src/main/resources/org/apache/hadoop/hbase/thrift2/hbase.thrift b/hbase-thrift/src/main/resources/org/apache/hadoop/hbase/thrift2/hbase.thrift index 4af843ad962..8b46f27df73 100644 --- a/hbase-thrift/src/main/resources/org/apache/hadoop/hbase/thrift2/hbase.thrift +++ b/hbase-thrift/src/main/resources/org/apache/hadoop/hbase/thrift2/hbase.thrift @@ -306,9 +306,9 @@ struct THRegionLocation { /** * Thrift wrapper around - * org.apache.hadoop.hbase.filter.CompareFilter$CompareOp. + * org.apache.hadoop.hbase.CompareOperator. */ -enum TCompareOp { +enum TCompareOperator { LESS = 0, LESS_OR_EQUAL = 1, EQUAL = 2, @@ -784,7 +784,7 @@ service THBaseService { 4: required binary qualifier, /** comparison to make on the value */ - 5: required TCompareOp compareOp, + 5: required TCompareOperator compareOperator, /** the expected value to be compared against, if not provided the check is for the non-existence of the column in question */
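
For reference, and not as part of the patch itself, below is a minimal sketch of what calling code looks like once the CompareFilter.CompareOp overloads and constructors removed above are gone, using the replacements named in the deprecation notes within this patch (org.apache.hadoop.hbase.CompareOperator and the Table#checkAndMutate(byte[], byte[]) builder), assuming the HBase 2.x client API. The class name, connection handle, and the row/family/qualifier/value literals are illustrative assumptions only.

import java.io.IOException;

import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class CompareOperatorMigrationSketch {

  /** Illustrative only: the table name and cell coordinates are made-up values. */
  static boolean example(Connection connection, TableName tableName) throws IOException {
    byte[] row = Bytes.toBytes("row-1");
    byte[] family = Bytes.toBytes("cf");
    byte[] qualifier = Bytes.toBytes("q");
    byte[] value = Bytes.toBytes("v");

    try (Table table = connection.getTable(tableName)) {
      // Filters: CompareFilter subclasses now take a CompareOperator where the removed
      // CompareFilter.CompareOp enum used to be accepted.
      Scan scan = new Scan();
      scan.setFilter(new RowFilter(CompareOperator.EQUAL, new BinaryComparator(row)));

      // Conditional mutations: the removed checkAndPut/checkAndDelete/checkAndMutate
      // overloads taking a CompareOp are replaced by the checkAndMutate(byte[], byte[])
      // builder that the deprecation notes above point to.
      Put put = new Put(row).addColumn(family, qualifier, value);
      return table.checkAndMutate(row, family)
          .qualifier(qualifier)
          .ifMatches(CompareOperator.GREATER, value)
          .thenPut(put);
    }
  }
}

The same builder also covers the delete and row-mutations variants removed here, via thenDelete and thenMutate.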