HBASE-19008 Add missing equals or hashCode method(s) to stock Filter implementations

Signed-off-by: Reid Chan <reidchan@apache.org>
Signed-off-by: Ted Yu <yuzhihong@gmail.com>

and

HBASE-21129 Clean up duplicate codes in #equals and #hashCode methods of Filter

Signed-off-by: Ted Yu <yuzhihong@gmail.com>

Signed-off-by: Andrew Purtell <apurtell@apache.org>
Author: liubangchen, 2018-08-23 11:19:17 +08:00 (committed by Andrew Purtell)
parent 49fab7df80
commit e79e4aefc0
33 changed files with 508 additions and 28 deletions
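Most of the filters below follow the same pattern: equals delegates to the pre-existing areSerializedFieldsEqual check, and hashCode is computed from the same serialized fields with java.util.Objects.hash (Bytes.hashCode for byte[] fields); a few server-side and test filters compare their fields directly instead. A minimal sketch of that pattern for a hypothetical ExampleLimitFilter (not part of this commit; it sits in the org.apache.hadoop.hbase.filter package only so it can override the package-private areSerializedFieldsEqual):

package org.apache.hadoop.hbase.filter;

import java.util.Objects;
import org.apache.hadoop.hbase.Cell;

// Hypothetical filter used only to illustrate the equals/hashCode pattern.
public class ExampleLimitFilter extends FilterBase {
  private final int limit;

  public ExampleLimitFilter(int limit) {
    this.limit = limit;
  }

  @Override
  public ReturnCode filterKeyValue(Cell cell) {
    return ReturnCode.INCLUDE; // pass-through; only equality matters for this sketch
  }

  @Override
  boolean areSerializedFieldsEqual(Filter o) {
    return o instanceof ExampleLimitFilter && this.limit == ((ExampleLimitFilter) o).limit;
  }

  // equals reuses the serialized-fields comparison, exactly as the stock filters below do.
  @Override
  public boolean equals(Object obj) {
    return obj instanceof Filter && areSerializedFieldsEqual((Filter) obj);
  }

  // hashCode stays consistent with equals by hashing the same serialized field.
  @Override
  public int hashCode() {
    return Objects.hash(this.limit);
  }
}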

View File

@@ -20,6 +20,7 @@
package org.apache.hadoop.hbase.filter;
import java.util.ArrayList;
import java.util.Objects;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
@@ -127,4 +128,14 @@ public class ColumnCountGetFilter extends FilterBase {
public String toString() {
return this.getClass().getSimpleName() + " " + this.limit;
}
@Override
public boolean equals(Object obj) {
return obj instanceof Filter && areSerializedFieldsEqual((Filter) obj);
}
@Override
public int hashCode() {
return Objects.hash(this.limit);
}
}

View File

@@ -19,6 +19,7 @@
package org.apache.hadoop.hbase.filter;
import java.util.ArrayList;
import java.util.Objects;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -236,4 +237,15 @@ public class ColumnPaginationFilter extends FilterBase
return String.format("%s (%d, %d)", this.getClass().getSimpleName(),
this.limit, this.offset);
}
@Override
public boolean equals(Object obj) {
return obj instanceof Filter && areSerializedFieldsEqual((Filter) obj);
}
@Override
public int hashCode() {
return columnOffset == null ? Objects.hash(this.limit, this.offset) :
Objects.hash(this.limit, Bytes.hashCode(this.columnOffset));
}
}

View File

@@ -149,4 +149,14 @@ public class ColumnPrefixFilter extends FilterBase {
public String toString() {
return this.getClass().getSimpleName() + " " + Bytes.toStringBinary(this.prefix);
}
@Override
public boolean equals(Object obj) {
return obj instanceof Filter && areSerializedFieldsEqual((Filter) obj);
}
@Override
public int hashCode() {
return Bytes.hashCode(this.getPrefix());
}
}

View File

@@ -34,6 +34,7 @@ import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
import org.apache.hadoop.hbase.util.Bytes;
import java.util.ArrayList;
import java.util.Objects;
/**
* This filter is used for selecting only those keys with columns that are
@@ -242,4 +243,15 @@ public class ColumnRangeFilter extends FilterBase {
+ ", " + Bytes.toStringBinary(this.maxColumn)
+ (this.maxColumnInclusive ? "]" : ")");
}
@Override
public boolean equals(Object obj) {
return obj instanceof Filter && areSerializedFieldsEqual((Filter) obj);
}
@Override
public int hashCode() {
return Objects.hash(Bytes.hashCode(getMinColumn()), getMinColumnInclusive(),
Bytes.hashCode(getMaxColumn()), getMaxColumnInclusive());
}
}
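With these methods in place, two ColumnRangeFilter instances describing the same half-open column range compare equal and hash identically. A small check (illustrative column names; assumes an HBase 1.x client on the classpath):

import org.apache.hadoop.hbase.filter.ColumnRangeFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class ColumnRangeFilterEqualityCheck {
  public static void main(String[] args) {
    // The same range ["col-a", "col-m") built twice.
    ColumnRangeFilter a = new ColumnRangeFilter(Bytes.toBytes("col-a"), true,
        Bytes.toBytes("col-m"), false);
    ColumnRangeFilter b = new ColumnRangeFilter(Bytes.toBytes("col-a"), true,
        Bytes.toBytes("col-m"), false);
    System.out.println(a.equals(b));                  // true
    System.out.println(a.hashCode() == b.hashCode()); // true, Bytes.hashCode is content-based
  }
}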

View File

@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.filter;
import com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
@@ -30,6 +31,8 @@ import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType;
import org.apache.hadoop.hbase.util.Bytes;
import java.util.ArrayList;
import java.util.Objects;
/**
* This is a generic filter to be used to filter by comparison. It takes an
* operator (equal, greater, not equal, etc) and a byte [] comparator.
@@ -188,4 +191,14 @@ public abstract class CompareFilter extends FilterBase {
this.compareOp.name(),
Bytes.toStringBinary(this.comparator.getValue()));
}
@Override
public boolean equals(Object obj) {
return obj instanceof Filter && areSerializedFieldsEqual((Filter) obj);
}
@Override
public int hashCode() {
return Objects.hash(this.getComparator(), this.getOperator());
}
}
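CompareFilter hashes its comparator and operator, and its equals goes through areSerializedFieldsEqual, which compares the operator and the comparator's serialized value. A brief check using RowFilter, one of the CompareFilter subclasses touched later in this commit (illustrative row key; assumes an HBase 1.x client):

import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class CompareFilterEqualityCheck {
  public static void main(String[] args) {
    // Same operator, comparators with equal serialized values.
    RowFilter f1 = new RowFilter(CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes("row-1")));
    RowFilter f2 = new RowFilter(CompareOp.EQUAL, new BinaryComparator(Bytes.toBytes("row-1")));
    System.out.println(f1.equals(f2)); // true
  }
}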

View File

@@ -23,6 +23,7 @@ import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import org.apache.hadoop.hbase.util.ByteStringer;
@@ -291,4 +292,15 @@ public class DependentColumnFilter extends CompareFilter {
this.compareOp.name(),
this.comparator != null ? Bytes.toStringBinary(this.comparator.getValue()) : "null");
}
@Override
public boolean equals(Object obj) {
return obj instanceof Filter && areSerializedFieldsEqual((Filter) obj);
}
@Override
public int hashCode() {
return Objects.hash(Bytes.hashCode(getFamily()), Bytes.hashCode(getQualifier()),
dropDependentColumn(), getComparator(), getOperator());
}
}

View File

@@ -130,4 +130,14 @@ public class FamilyFilter extends CompareFilter {
FamilyFilter other = (FamilyFilter)o;
return super.areSerializedFieldsEqual(other);
}
@Override
public boolean equals(Object obj) {
return obj instanceof Filter && areSerializedFieldsEqual((Filter) obj);
}
@Override
public int hashCode() {
return super.hashCode();
}
}

View File

@@ -23,6 +23,7 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.Cell;
@@ -295,4 +296,14 @@ final public class FilterList extends Filter {
public String toString() {
return this.filterListBase.toString();
}
@Override
public boolean equals(Object obj) {
return obj instanceof Filter && areSerializedFieldsEqual((Filter) obj);
}
@Override
public int hashCode() {
return Objects.hash(getOperator(), getFilters());
}
}

View File

@@ -26,6 +26,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
/**
* FilterListWithAND represents an ordered list of filters which will be evaluated with an AND
@@ -260,4 +261,21 @@ public class FilterListWithAND extends FilterListBase {
}
return maxHint;
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof FilterListWithAND)) {
return false;
}
if (this == obj) {
return true;
}
FilterListWithAND f = (FilterListWithAND) obj;
return this.filters.equals(f.getFilters()) && this.seekHintFilters.equals(f.seekHintFilters);
}
@Override
public int hashCode() {
return Objects.hash(this.seekHintFilters, this.filters);
}
}

View File

@@ -27,6 +27,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
/**
* FilterListWithOR represents an ordered list of filters which will be evaluated with an OR
@@ -377,4 +378,24 @@ public class FilterListWithOR extends FilterListBase {
}
return minKeyHint;
}
@Override
public boolean equals(Object obj) {
if (obj == null || (!(obj instanceof FilterListWithOR))) {
return false;
}
if (this == obj) {
return true;
}
FilterListWithOR f = (FilterListWithOR) obj;
return this.filters.equals(f.getFilters()) &&
this.prevFilterRCList.equals(f.prevFilterRCList) &&
this.prevCellList.equals(f.prevCellList);
}
@Override
public int hashCode() {
return Objects.hash(this.prevFilterRCList, this.prevCellList, this.filters);
}
}
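FilterList equality compares the operator and the member filters, which only became meaningful once the member filters themselves defined equals. A short check (illustrative prefix and page size; assumes an HBase 1.x client):

import java.util.Arrays;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.PageFilter;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class FilterListEqualityCheck {
  public static void main(String[] args) {
    FilterList a = new FilterList(FilterList.Operator.MUST_PASS_ONE, Arrays.<Filter>asList(
        new PrefixFilter(Bytes.toBytes("user-")), new PageFilter(10)));
    FilterList b = new FilterList(FilterList.Operator.MUST_PASS_ONE, Arrays.<Filter>asList(
        new PrefixFilter(Bytes.toBytes("user-")), new PageFilter(10)));
    // Equal operator and pairwise-equal member filters, so the lists compare equal.
    System.out.println(a.equals(b)); // true
  }
}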

View File

@@ -19,6 +19,7 @@
package org.apache.hadoop.hbase.filter;
import java.util.ArrayList;
import java.util.Objects;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
@@ -122,4 +123,14 @@ public class FirstKeyOnlyFilter extends FilterBase {
return true;
}
@Override
public boolean equals(Object obj) {
return obj instanceof Filter && areSerializedFieldsEqual((Filter) obj);
}
@Override
public int hashCode() {
return Objects.hashCode(foundKV);
}
}

View File

@@ -18,10 +18,10 @@
package org.apache.hadoop.hbase.filter;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
import org.apache.hadoop.hbase.util.ByteStringer;
@@ -30,6 +30,7 @@ import org.apache.hadoop.hbase.util.Bytes;
import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;
import java.util.Objects;
import java.util.Set;
import java.util.TreeSet;
@@ -127,4 +128,14 @@ public class FirstKeyValueMatchingQualifiersFilter extends FirstKeyOnlyFilter {
FirstKeyValueMatchingQualifiersFilter other = (FirstKeyValueMatchingQualifiersFilter)o;
return this.qualifiers.equals(other.qualifiers);
}
@Override
public boolean equals(Object obj) {
return obj instanceof Filter && areSerializedFieldsEqual((Filter) obj);
}
@Override
public int hashCode() {
return Objects.hash(this.qualifiers);
}
}

View File

@@ -21,6 +21,7 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.Objects;
import java.util.PriorityQueue;
import org.apache.hadoop.hbase.Cell;
@@ -640,4 +641,14 @@ public class FuzzyRowFilter extends FilterBase {
}
return true;
}
@Override
public boolean equals(Object obj) {
return obj instanceof Filter && areSerializedFieldsEqual((Filter) obj);
}
@Override
public int hashCode() {
return Objects.hash(this.fuzzyKeysData);
}
}

View File

@@ -140,4 +140,14 @@ public class InclusiveStopFilter extends FilterBase {
public String toString() {
return this.getClass().getSimpleName() + " " + Bytes.toStringBinary(this.stopRowKey);
}
@Override
public boolean equals(Object obj) {
return obj instanceof Filter && areSerializedFieldsEqual((Filter) obj);
}
@Override
public int hashCode() {
return Bytes.hashCode(this.stopRowKey);
}
}

View File

@@ -21,12 +21,13 @@ package org.apache.hadoop.hbase.filter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Objects;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
import org.apache.hadoop.hbase.util.Bytes;
@@ -126,4 +127,14 @@ public class KeyOnlyFilter extends FilterBase {
KeyOnlyFilter other = (KeyOnlyFilter)o;
return this.lenAsVal == other.lenAsVal;
}
@Override
public boolean equals(Object obj) {
return obj instanceof Filter && areSerializedFieldsEqual((Filter) obj);
}
@Override
public int hashCode() {
return Objects.hash(this.lenAsVal);
}
}

View File

@@ -21,6 +21,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HConstants;
@@ -512,5 +513,38 @@ public class MultiRowRangeFilter extends FilterBase {
|| Bytes.compareTo(startRow, stopRow) < 0
|| (Bytes.compareTo(startRow, stopRow) == 0 && stopRowInclusive == true);
}
@Override
public boolean equals(Object obj){
if (!(obj instanceof RowRange)) {
return false;
}
if (this == obj) {
return true;
}
RowRange rr = (RowRange) obj;
return Bytes.equals(this.stopRow, rr.getStopRow()) &&
Bytes.equals(this.startRow, rr.getStartRow()) &&
this.startRowInclusive == rr.isStartRowInclusive() &&
this.stopRowInclusive == rr.isStopRowInclusive();
}
@Override
public int hashCode() {
return Objects.hash(Bytes.hashCode(this.stopRow),
Bytes.hashCode(this.startRow),
this.startRowInclusive,
this.stopRowInclusive);
}
}
@Override
public boolean equals(Object obj) {
return obj instanceof Filter && areSerializedFieldsEqual((Filter) obj);
}
@Override
public int hashCode() {
return Objects.hash(this.rangeList);
}
}
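RowRange gets value-based equals and hashCode of its own, and the enclosing MultiRowRangeFilter hashes its range list, so two filters built from the same ranges now agree. A quick check (illustrative row keys; the constructor may throw IOException for invalid ranges):

import java.io.IOException;
import java.util.Arrays;
import org.apache.hadoop.hbase.filter.MultiRowRangeFilter;
import org.apache.hadoop.hbase.filter.MultiRowRangeFilter.RowRange;
import org.apache.hadoop.hbase.util.Bytes;

public class MultiRowRangeFilterEqualityCheck {
  public static void main(String[] args) throws IOException {
    MultiRowRangeFilter f1 = new MultiRowRangeFilter(Arrays.asList(
        new RowRange(Bytes.toBytes("a"), true, Bytes.toBytes("d"), false),
        new RowRange(Bytes.toBytes("m"), true, Bytes.toBytes("q"), false)));
    MultiRowRangeFilter f2 = new MultiRowRangeFilter(Arrays.asList(
        new RowRange(Bytes.toBytes("a"), true, Bytes.toBytes("d"), false),
        new RowRange(Bytes.toBytes("m"), true, Bytes.toBytes("q"), false)));
    // RowRange now compares and hashes by value, so the enclosing filters do too.
    System.out.println(f1.equals(f2) && f1.hashCode() == f2.hashCode()); // true
  }
}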

View File

@@ -18,20 +18,22 @@
package org.apache.hadoop.hbase.filter;
import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Objects;
import java.util.TreeSet;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
/**
* This filter is used for selecting only those keys with columns that matches
* a particular prefix. For example, if prefix is 'an', it will pass keys will
@@ -206,4 +208,14 @@ public class MultipleColumnPrefixFilter extends FilterBase {
return String.format("%s (%d/%d): [%s]", this.getClass().getSimpleName(),
count, this.sortedPrefixes.size(), prefixes.toString());
}
@Override
public boolean equals(Object obj) {
return obj instanceof Filter && areSerializedFieldsEqual((Filter) obj);
}
@Override
public int hashCode() {
return Objects.hash(this.sortedPrefixes);
}
}

View File

@@ -20,14 +20,16 @@ package org.apache.hadoop.hbase.filter;
import com.google.common.base.Preconditions;
import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Objects;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
/**
* Implementation of Filter interface that limits results to a specific page
@@ -141,4 +143,14 @@ public class PageFilter extends FilterBase {
public String toString() {
return this.getClass().getSimpleName() + " " + this.pageSize;
}
@Override
public boolean equals(Object obj) {
return obj instanceof Filter && areSerializedFieldsEqual((Filter) obj);
}
@Override
public int hashCode() {
return Objects.hash(this.pageSize);
}
}
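PageFilter carries a single pageSize, which now also drives equals and hashCode. Typical usage, per the class javadoc quoted above, is to cap how many rows each region server returns for a scan (illustrative page size; assumes an HBase 1.x client):

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.PageFilter;

public class PageFilterUsageSketch {
  public static void main(String[] args) {
    // Each region server stops after ten matching rows; the client still has to
    // trim the merged result down to the final page size.
    Scan scan = new Scan();
    scan.setFilter(new PageFilter(10));
    // Two filters with the same page size are now equal.
    System.out.println(new PageFilter(10).equals(new PageFilter(10))); // true
  }
}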

View File

@@ -20,17 +20,17 @@
package org.apache.hadoop.hbase.filter;
import com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.util.ByteStringer;
import com.google.protobuf.InvalidProtocolBufferException;
import java.util.ArrayList;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
import org.apache.hadoop.hbase.util.Bytes;
import java.util.ArrayList;
import org.apache.hadoop.hbase.util.ByteStringer;
/**
* Pass results that have same row prefix.
@@ -149,4 +149,14 @@ public class PrefixFilter extends FilterBase {
public String toString() {
return this.getClass().getSimpleName() + " " + Bytes.toStringBinary(this.prefix);
}
@Override
public boolean equals(Object obj) {
return obj instanceof Filter && areSerializedFieldsEqual((Filter) obj);
}
@Override
public int hashCode() {
return Bytes.hashCode(this.getPrefix());
}
}

View File

@@ -125,4 +125,14 @@ public class QualifierFilter extends CompareFilter {
return super.areSerializedFieldsEqual(o);
}
@Override
public boolean equals(Object obj) {
return obj instanceof Filter && areSerializedFieldsEqual((Filter) obj);
}
@Override
public int hashCode() {
return super.hashCode();
}
}

View File

@@ -19,6 +19,7 @@
package org.apache.hadoop.hbase.filter;
import java.util.Objects;
import java.util.Random;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -157,4 +158,14 @@ public class RandomRowFilter extends FilterBase {
RandomRowFilter other = (RandomRowFilter)o;
return this.getChance() == other.getChance();
}
@Override
public boolean equals(Object obj) {
return obj instanceof Filter && areSerializedFieldsEqual((Filter) obj);
}
@Override
public int hashCode() {
return Objects.hash(this.getChance());
}
}

View File

@@ -144,4 +144,14 @@ public class RowFilter extends CompareFilter {
return super.areSerializedFieldsEqual(o);
}
@Override
public boolean equals(Object obj) {
return obj instanceof Filter && areSerializedFieldsEqual((Filter) obj);
}
@Override
public int hashCode() {
return super.hashCode();
}
}

View File

@@ -184,4 +184,14 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter {
return super.areSerializedFieldsEqual(o);
}
@Override
public boolean equals(Object obj) {
return obj instanceof Filter && areSerializedFieldsEqual((Filter) obj);
}
@Override
public int hashCode() {
return super.hashCode();
}
}

View File

@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.filter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Objects;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.commons.logging.Log;
@@ -399,4 +400,15 @@ public class SingleColumnValueFilter extends FilterBase {
Bytes.toStringBinary(this.columnQualifier), this.compareOp.name(),
Bytes.toStringBinary(this.comparator.getValue()));
}
@Override
public boolean equals(Object obj) {
return obj instanceof Filter && areSerializedFieldsEqual((Filter) obj);
}
@Override
public int hashCode() {
return Objects.hash(Bytes.hashCode(getFamily()), Bytes.hashCode(getQualifier()),
this.getOperator(), getComparator(), getFilterIfMissing(), getLatestVersionOnly());
}
}

View File

@@ -20,6 +20,7 @@
package org.apache.hadoop.hbase.filter;
import java.io.IOException;
import java.util.Objects;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
@@ -152,4 +153,14 @@ public class SkipFilter extends FilterBase {
public String toString() {
return this.getClass().getSimpleName() + " " + this.filter.toString();
}
@Override
public boolean equals(Object obj) {
return obj instanceof Filter && areSerializedFieldsEqual((Filter) obj);
}
@Override
public int hashCode() {
return Objects.hash(this.filter);
}
}

View File

@@ -22,6 +22,7 @@ import com.google.protobuf.InvalidProtocolBufferException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.TreeSet;
import org.apache.hadoop.hbase.Cell;
@@ -233,4 +234,14 @@ public class TimestampsFilter extends FilterBase {
return String.format("%s (%d/%d): [%s] canHint: [%b]", this.getClass().getSimpleName(),
count, this.timestamps.size(), tsList.toString(), canHint);
}
@Override
public boolean equals(Object obj) {
return obj instanceof Filter && areSerializedFieldsEqual((Filter) obj);
}
@Override
public int hashCode() {
return Objects.hash(getTimestamps());
}
}

View File

@@ -20,16 +20,17 @@
package org.apache.hadoop.hbase.filter;
import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
import java.io.IOException;
import java.util.ArrayList;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
/**
* This filter is used to filter based on column value. It takes an
* operator (equal, greater, not equal, etc) and a byte [] comparator for the
@@ -124,4 +125,14 @@ public class ValueFilter extends CompareFilter {
return super.areSerializedFieldsEqual(o);
}
@Override
public boolean equals(Object obj) {
return obj instanceof Filter && areSerializedFieldsEqual((Filter) obj);
}
@Override
public int hashCode() {
return super.hashCode();
}
}

View File

@@ -20,6 +20,7 @@
package org.apache.hadoop.hbase.filter;
import java.io.IOException;
import java.util.Objects;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
@@ -152,4 +153,14 @@ public class WhileMatchFilter extends FilterBase {
public String toString() {
return this.getClass().getSimpleName() + " " + this.filter.toString();
}
@Override
public boolean equals(Object obj) {
return obj instanceof Filter && areSerializedFieldsEqual((Filter) obj);
}
@Override
public int hashCode() {
return Objects.hash(this.filter);
}
}

View File

@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.security.access;
import java.io.IOException;
import java.util.Map;
import java.util.Objects;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.Cell;
@@ -174,4 +175,26 @@ class AccessControlFilter extends FilterBase {
throw new UnsupportedOperationException(
"Serialization not supported. Intended for server-side use only.");
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof AccessControlFilter)) {
return false;
}
if (this == obj){
return true;
}
AccessControlFilter f=(AccessControlFilter)obj;
return this.authManager.equals(f.authManager) &&
this.table.equals(f.table) &&
this.user.equals(f.user) &&
this.strategy.equals(f.strategy) &&
this.cfVsMaxVersions.equals(f.cfVsMaxVersions);
}
@Override
public int hashCode() {
return Objects.hash(this.authManager, this.table, this.strategy, this.user,
this.cfVsMaxVersions);
}
}

View File

@@ -30,6 +30,7 @@ import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -1117,6 +1118,24 @@ public class VisibilityController extends BaseMasterAndRegionObserver implements
public Cell transformCell(Cell v) {
return v;
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof DeleteVersionVisibilityExpressionFilter)) {
return false;
}
if (this == obj){
return true;
}
DeleteVersionVisibilityExpressionFilter f = (DeleteVersionVisibilityExpressionFilter)obj;
return this.deleteCellVisTags.equals(f.deleteCellVisTags) &&
this.deleteCellVisTagsFormat.equals(f.deleteCellVisTagsFormat);
}
@Override
public int hashCode() {
return Objects.hash(this.deleteCellVisTags, this.deleteCellVisTagsFormat);
}
}
/**

View File

@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.security.visibility;
import java.io.IOException;
import java.util.Map;
import java.util.Objects;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.Cell;
@@ -92,4 +93,22 @@ class VisibilityLabelFilter extends FilterBase {
this.curFamilyMaxVersions = 0;
this.curQualMetVersions = 0;
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof VisibilityLabelFilter)) {
return false;
}
if(this == obj){
return true;
}
VisibilityLabelFilter f = (VisibilityLabelFilter)obj;
return this.expEvaluator.equals(f.expEvaluator) &&
this.cfVsMaxVersions.equals(f.cfVsMaxVersions);
}
@Override
public int hashCode() {
return Objects.hash(this.expEvaluator, this.cfVsMaxVersions);
}
}

View File

@@ -18,6 +18,7 @@
package org.apache.hadoop.hbase.client;
import java.io.IOException;
import java.util.Objects;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -55,4 +56,21 @@ public final class ColumnCountOnRowFilter extends FilterBase {
public static ColumnCountOnRowFilter parseFrom(byte[] bytes) throws DeserializationException {
return new ColumnCountOnRowFilter(Bytes.toInt(bytes));
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof ColumnCountOnRowFilter)) {
return false;
}
if (this == obj) {
return true;
}
ColumnCountOnRowFilter f = (ColumnCountOnRowFilter) obj;
return this.limit == f.limit;
}
@Override
public int hashCode() {
return Objects.hash(this.limit);
}
}

View File

@@ -30,6 +30,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Objects;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
@@ -635,6 +636,23 @@ public class TestFilterList {
this.didCellPassToTheFilter = true;
return targetRetCode;
}
@Override
public boolean equals(Object obj) {
if(obj == null || !(obj instanceof MockFilter)){
return false;
}
if(obj == this){
return true;
}
MockFilter f = (MockFilter)obj;
return this.targetRetCode.equals(f.targetRetCode);
}
@Override
public int hashCode() {
return Objects.hash(this.targetRetCode);
}
}
@Test
@@ -807,6 +825,23 @@ public class TestFilterList {
public Cell getNextCellHint(Cell currentCell) throws IOException {
return this.returnCell;
}
@Override
public boolean equals(Object obj) {
if(obj == null || !(obj instanceof MockSeekHintFilter)){
return false;
}
if(obj == this){
return true;
}
MockSeekHintFilter f = (MockSeekHintFilter)obj;
return this.returnCell.equals(f.returnCell);
}
@Override
public int hashCode() {
return Objects.hash(this.returnCell);
}
}
@Test
@@ -975,6 +1010,23 @@ public class TestFilterList {
public boolean getTransformed() {
return this.transformed;
}
@Override
public boolean equals(Object obj) {
if(!(obj instanceof TransformFilter)){
return false;
}
if (obj == this) {
return true;
}
TransformFilter f = (TransformFilter)obj;
return this.targetRetCode.equals(f.targetRetCode);
}
@Override
public int hashCode() {
return Objects.hash(this.targetRetCode);
}
}
@Test