diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
index 357f19c8831..fd2eb27c23f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
@@ -32,6 +32,8 @@ import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferExce
 import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * This filter is used for selecting only those keys with columns that matches
@@ -40,15 +42,18 @@ import org.apache.hadoop.hbase.util.Bytes;
  */
 @InterfaceAudience.Public
 public class MultipleColumnPrefixFilter extends FilterBase {
+  private static final Logger LOG = LoggerFactory.getLogger(MultipleColumnPrefixFilter.class);
   protected byte [] hint = null;
   protected TreeSet<byte []> sortedPrefixes = createTreeSet();
   private final static int MAX_LOG_PREFIXES = 5;
 
   public MultipleColumnPrefixFilter(final byte [][] prefixes) {
     if (prefixes != null) {
-      for (int i = 0; i < prefixes.length; i++) {
-        if (!sortedPrefixes.add(prefixes[i]))
-          throw new IllegalArgumentException ("prefixes must be distinct");
+      for (byte[] prefix : prefixes) {
+        if (!sortedPrefixes.add(prefix)) {
+          LOG.error("prefix {} is repeated", Bytes.toString(prefix));
+          throw new IllegalArgumentException("prefixes must be distinct");
+        }
       }
     }
   }
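
For reviewers, a minimal standalone sketch of the behavior this patch touches: constructing the filter with a repeated prefix still throws IllegalArgumentException, but now the offending prefix is logged first. The class name DuplicatePrefixExample and the "col-a" prefixes are illustrative only, not part of this change; the filter and Bytes calls are the existing HBase client API.

    import org.apache.hadoop.hbase.filter.MultipleColumnPrefixFilter;
    import org.apache.hadoop.hbase.util.Bytes;

    public class DuplicatePrefixExample {
      public static void main(String[] args) {
        // Two identical prefixes: sortedPrefixes.add() returns false for the second one.
        byte[][] prefixes = new byte[][] { Bytes.toBytes("col-a"), Bytes.toBytes("col-a") };
        try {
          new MultipleColumnPrefixFilter(prefixes);
        } catch (IllegalArgumentException e) {
          // With this patch, the repeated prefix ("col-a") is logged at ERROR level
          // before the exception is thrown, so the root cause is visible in the logs.
          System.out.println("rejected: " + e.getMessage());
        }
      }
    }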