HBASE-19488 Move to using Apache commons CollectionUtils
Signed-off-by: Apekshit Sharma <appy@apache.org>
parent dcc840e8a5
commit d866e7c658
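
The change largely follows one pattern: call sites that used HBase's internal org.apache.hadoop.hbase.util.CollectionUtils either switch to org.apache.commons.collections.CollectionUtils for emptiness checks, or inline the old nullToEmpty helper as an explicit null check. A minimal sketch of the two idioms, assuming a hypothetical caller (the class and method names below are illustrative, not part of the patch):

import java.util.Collections;
import java.util.List;
import org.apache.commons.collections.CollectionUtils;

// Illustrative only; not a class from the patch.
public class NullSafeIdioms {

  // Old style: return CollectionUtils.nullToEmpty(children);
  // New style: inline the null check.
  static List<String> listOrEmpty(List<String> children) {
    return children != null ? children : Collections.emptyList();
  }

  // Old style: org.apache.hadoop.hbase.util.CollectionUtils.isEmpty(list)
  // New style: the commons-collections equivalent.
  static void requireNonEmpty(List<?> mutations) {
    if (CollectionUtils.isEmpty(mutations)) {
      throw new IllegalArgumentException("Cannot instantiate a RowMutations by empty list");
    }
  }
}
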
@@ -22,8 +22,9 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 
+import org.apache.commons.collections.CollectionUtils;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.CollectionUtils;
 import org.apache.yetus.audience.InterfaceAudience;
 
 /**
@@ -47,7 +48,7 @@ public class RowMutations implements Row {
    */
   public static RowMutations of(List<? extends Mutation> mutations) throws IOException {
     if (CollectionUtils.isEmpty(mutations)) {
-      throw new IllegalArgumentException("Can't instantiate a RowMutations by empty list");
+      throw new IllegalArgumentException("Cannot instantiate a RowMutations by empty list");
     }
     return new RowMutations(mutations.get(0).getRow(), mutations.size())
         .add(mutations);
@@ -38,6 +38,7 @@ import java.util.Comparator;
 import java.util.Iterator;
 import java.util.List;
 
+import org.apache.commons.collections.CollectionUtils;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.KeyValue;
@@ -19,10 +19,6 @@
 package org.apache.hadoop.hbase.util;
 
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
 import java.util.concurrent.ConcurrentMap;
 import java.util.function.Supplier;
 
@@ -34,82 +30,6 @@ import org.apache.yetus.audience.InterfaceAudience;
 @InterfaceAudience.Private
 public class CollectionUtils {
 
-  private static final List<Object> EMPTY_LIST = Collections.unmodifiableList(new ArrayList<>(0));
-
-  @SuppressWarnings("unchecked")
-  public static <T> Collection<T> nullSafe(Collection<T> in) {
-    if (in == null) {
-      return (Collection<T>)EMPTY_LIST;
-    }
-    return in;
-  }
-
-  /************************ size ************************************/
-
-  public static <T> int nullSafeSize(Collection<T> collection) {
-    if (collection == null) {
-      return 0;
-    }
-    return collection.size();
-  }
-
-  public static <A, B> boolean nullSafeSameSize(Collection<A> a, Collection<B> b) {
-    return nullSafeSize(a) == nullSafeSize(b);
-  }
-
-  /*************************** empty ****************************************/
-
-  public static <T> boolean isEmpty(Collection<T> collection) {
-    return collection == null || collection.isEmpty();
-  }
-
-  public static <T> boolean notEmpty(Collection<T> collection) {
-    return !isEmpty(collection);
-  }
-
-  /************************ first/last **************************/
-
-  public static <T> T getFirst(Collection<T> collection) {
-    if (CollectionUtils.isEmpty(collection)) {
-      return null;
-    }
-    for (T t : collection) {
-      return t;
-    }
-    return null;
-  }
-
-  /**
-   * @param list any list
-   * @return -1 if list is empty, otherwise the max index
-   */
-  public static int getLastIndex(List<?> list){
-    if(isEmpty(list)){
-      return -1;
-    }
-    return list.size() - 1;
-  }
-
-  /**
-   * @param list
-   * @param index the index in question
-   * @return true if it is the last index or if list is empty and -1 is passed for the index param
-   */
-  public static boolean isLastIndex(List<?> list, int index){
-    return index == getLastIndex(list);
-  }
-
-  public static <T> T getLast(List<T> list) {
-    if (isEmpty(list)) {
-      return null;
-    }
-    return list.get(list.size() - 1);
-  }
-
-  public static <T> List<T> nullToEmpty(List<T> list) {
-    return list != null ? list : Collections.emptyList();
-  }
 /**
  * In HBASE-16648 we found that ConcurrentHashMap.get is much faster than computeIfAbsent if the
  * value already exists. Notice that the implementation does not guarantee that the supplier will
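
For orientation, the helpers removed above have straightforward counterparts; a hedged sketch of the mapping (the visible hunks only exercise isEmpty and inlined null checks, so the rest is shown for comparison only, and the class below is illustrative, not code from the patch):

import java.util.Collections;
import java.util.List;
import org.apache.commons.collections.CollectionUtils;

// Illustrative mapping, not code from the patch.
class RemovedHelperEquivalents {

  static boolean isEmpty(List<?> list) {
    return CollectionUtils.isEmpty(list);      // replaces the removed isEmpty(...)
  }

  static boolean notEmpty(List<?> list) {
    return CollectionUtils.isNotEmpty(list);   // replaces the removed notEmpty(...)
  }

  static <T> List<T> nullToEmpty(List<T> list) {
    return list != null ? list : Collections.emptyList();  // inlined at call sites in the hunks below
  }
}
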
@@ -18,11 +18,11 @@
 package org.apache.hadoop.hbase.replication;
 
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.List;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.client.replication.ReplicationPeerConfigUtil;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
-import org.apache.hadoop.hbase.util.CollectionUtils;
 import org.apache.hadoop.hbase.zookeeper.ZKUtil;
 import org.apache.hadoop.hbase.zookeeper.ZKUtil.ZKUtilOp;
 import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
@@ -131,7 +131,8 @@ public class ZKReplicationPeerStorage extends ZKReplicationStorageBase
   @Override
   public List<String> listPeerIds() throws ReplicationException {
     try {
-      return CollectionUtils.nullToEmpty(ZKUtil.listChildrenNoWatch(zookeeper, peersZNode));
+      List<String> children = ZKUtil.listChildrenNoWatch(zookeeper, peersZNode);
+      return children != null ? children : Collections.emptyList();
     } catch (KeeperException e) {
       throw new ReplicationException("Cannot get the list of peers", e);
     }
@@ -18,7 +18,6 @@
 package org.apache.hadoop.hbase.replication;
 
 import static java.util.stream.Collectors.toList;
-import static org.apache.hadoop.hbase.util.CollectionUtils.nullToEmpty;
 
 import java.util.ArrayList;
 import java.util.Collections;
@@ -30,6 +29,7 @@ import java.util.Set;
 import java.util.SortedSet;
 import java.util.TreeSet;
 import java.util.stream.Collectors;
+import org.apache.commons.collections.CollectionUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HConstants;
@@ -37,7 +37,6 @@ import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.CollectionUtils;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.hbase.zookeeper.ZKUtil;
 import org.apache.hadoop.hbase.zookeeper.ZKUtil.ZKUtilOp;
@@ -451,8 +450,11 @@ class ZKReplicationQueueStorage extends ZKReplicationStorageBase
   }
 
   private List<ServerName> getListOfReplicators0() throws KeeperException {
-    return nullToEmpty(ZKUtil.listChildrenNoWatch(zookeeper, queuesZNode)).stream()
-        .map(ServerName::parseServerName).collect(toList());
+    List<String> children = ZKUtil.listChildrenNoWatch(zookeeper, queuesZNode);
+    if (children == null) {
+      children = Collections.emptyList();
+    }
+    return children.stream().map(ServerName::parseServerName).collect(toList());
   }
 
   @Override
@@ -466,7 +468,9 @@
 
   private List<String> getWALsInQueue0(ServerName serverName, String queueId)
       throws KeeperException {
-    return nullToEmpty(ZKUtil.listChildrenNoWatch(zookeeper, getQueueNode(serverName, queueId)));
+    List<String> children = ZKUtil.listChildrenNoWatch(zookeeper, getQueueNode(serverName,
+      queueId));
+    return children != null ? children : Collections.emptyList();
   }
 
   @Override
@@ -482,7 +486,8 @@
   }
 
   private List<String> getAllQueues0(ServerName serverName) throws KeeperException {
-    return nullToEmpty(ZKUtil.listChildrenNoWatch(zookeeper, getRsNode(serverName)));
+    List<String> children = ZKUtil.listChildrenNoWatch(zookeeper, getRsNode(serverName));
+    return children != null ? children : Collections.emptyList();
   }
 
   @Override
@@ -602,7 +607,8 @@
   }
 
   private List<String> getAllPeersFromHFileRefsQueue0() throws KeeperException {
-    return nullToEmpty(ZKUtil.listChildrenNoWatch(zookeeper, hfileRefsZNode));
+    List<String> children = ZKUtil.listChildrenNoWatch(zookeeper, hfileRefsZNode);
+    return children != null ? children : Collections.emptyList();
   }
 
   @Override
@@ -616,7 +622,9 @@
   }
 
   private List<String> getReplicableHFiles0(String peerId) throws KeeperException {
-    return nullToEmpty(ZKUtil.listChildrenNoWatch(this.zookeeper, getHFileRefsPeerNode(peerId)));
+    List<String> children = ZKUtil.listChildrenNoWatch(this.zookeeper,
+      getHFileRefsPeerNode(peerId));
+    return children != null ? children : Collections.emptyList();
   }
 
   @Override
@@ -70,6 +70,7 @@ import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReadWriteLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 import java.util.function.Function;
+import org.apache.commons.collections.CollectionUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -156,7 +157,6 @@ import org.apache.hadoop.hbase.trace.TraceUtil;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.CancelableProgressable;
 import org.apache.hadoop.hbase.util.ClassSize;
-import org.apache.hadoop.hbase.util.CollectionUtils;
 import org.apache.hadoop.hbase.util.CompressionTest;
 import org.apache.hadoop.hbase.util.EncryptionTest;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@@ -42,6 +42,7 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.LongAdder;
+import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.lang3.mutable.MutableObject;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
@@ -125,7 +126,6 @@ import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.security.access.AccessChecker;
 import org.apache.hadoop.hbase.security.access.Permission;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.CollectionUtils;
 import org.apache.hadoop.hbase.util.DNS;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.Pair;
@@ -27,6 +27,7 @@ import java.util.OptionalInt;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.locks.ReentrantLock;
 
+import org.apache.commons.collections.CollectionUtils;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellUtil;
@@ -45,13 +46,14 @@ import org.apache.hadoop.hbase.regionserver.handler.ParallelSeekHandler;
 import org.apache.hadoop.hbase.regionserver.querymatcher.CompactionScanQueryMatcher;
 import org.apache.hadoop.hbase.regionserver.querymatcher.ScanQueryMatcher;
 import org.apache.hadoop.hbase.regionserver.querymatcher.UserScanQueryMatcher;
-import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
-import org.apache.hadoop.hbase.util.CollectionUtils;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.yetus.audience.InterfaceAudience;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
+import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
 
 /**
  * Scanner scans both the memstore and the Store. Coalesce KeyValue stream into List<KeyValue>
@@ -25,6 +25,7 @@ import java.util.List;
 import java.util.Set;
 import java.util.TreeSet;
 
+import org.apache.commons.collections.CollectionUtils;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellUtil;
@@ -32,7 +33,6 @@ import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.regionserver.MultiVersionConcurrencyControl;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.CollectionUtils;
 import org.apache.hadoop.hbase.wal.WAL.Entry;
 import org.apache.hadoop.hbase.wal.WALEdit;
 import org.apache.hadoop.hbase.wal.WALKeyImpl;