HBASE-18704 Upgrade hbase to commons-collections 4
Upgrade commons-collections:3.2.2 to commons-collections4:4.1
Add missing dependency for hbase-procedure, hbase-thrift
Replace CircularFifoBuffer with CircularFifoQueue in WALProcedureStore and TaskMonitor

Signed-off-by: Sean Busbey <busbey@apache.org>
Signed-off-by: Chia-Ping Tsai <chia7712@gmail.com>
parent df34300cd3
commit 137b105c67
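Note: the main behavioral swap named above is CircularFifoBuffer (commons-collections 3) to CircularFifoQueue (commons-collections4). A minimal standalone sketch of the replacement type's semantics, assuming commons-collections4 4.1 on the classpath; the class and variable names here are illustrative and not part of the patch:

import org.apache.commons.collections4.queue.CircularFifoQueue;

public class CircularFifoQueueSketch {
  public static void main(String[] args) {
    // Bounded FIFO: once the capacity (3) is reached, adding another element
    // silently evicts the oldest one, just as CircularFifoBuffer did.
    CircularFifoQueue<String> recent = new CircularFifoQueue<>(3);
    for (int i = 1; i <= 5; i++) {
      recent.add("sync-" + i);
    }
    // Prints [sync-3, sync-4, sync-5]; "sync-1" and "sync-2" were evicted.
    System.out.println(recent);
  }
}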
@@ -238,8 +238,8 @@
 <scope>compile</scope>
 </dependency>
 <dependency>
-<groupId>commons-collections</groupId>
-<artifactId>commons-collections</artifactId>
+<groupId>org.apache.commons</groupId>
+<artifactId>commons-collections4</artifactId>
 <scope>compile</scope>
 </dependency>
 <dependency>
@@ -28,7 +28,7 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.Map;

-import org.apache.commons.collections.iterators.UnmodifiableIterator;
+import org.apache.commons.collections4.iterators.UnmodifiableIterator;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -388,7 +388,7 @@ public class CompoundConfiguration extends Configuration {
 }
 }

-return UnmodifiableIterator.decorate(ret.entrySet().iterator());
+return UnmodifiableIterator.unmodifiableIterator(ret.entrySet().iterator());
 }

 @Override
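collections4 renamed the decorate(...) factory on UnmodifiableIterator to unmodifiableIterator(...), which is what the hunk above adopts. A small sketch under that assumption; the map contents are made up and not taken from CompoundConfiguration:

import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import org.apache.commons.collections4.iterators.UnmodifiableIterator;

public class UnmodifiableIteratorSketch {
  public static void main(String[] args) {
    Map<String, String> props = new HashMap<>();
    props.put("hbase.rootdir", "/hbase");

    // collections 3: UnmodifiableIterator.decorate(props.entrySet().iterator());
    // collections4:  the factory is named unmodifiableIterator(...).
    Iterator<Map.Entry<String, String>> it =
        UnmodifiableIterator.unmodifiableIterator(props.entrySet().iterator());

    it.next();
    it.remove();  // throws UnsupportedOperationException: the iterator is read-only
  }
}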
@@ -419,4 +419,4 @@ public class CompoundConfiguration extends Configuration {
 public void writeXml(OutputStream out) throws IOException {
 throw new UnsupportedOperationException("Immutable Configuration");
 }
-};
+}
@@ -97,10 +97,6 @@
 <groupId>commons-cli</groupId>
 <artifactId>commons-cli</artifactId>
 </dependency>
-<dependency>
-<groupId>commons-collections</groupId>
-<artifactId>commons-collections</artifactId>
-</dependency>
 <dependency>
 <groupId>com.github.stephenc.findbugs</groupId>
 <artifactId>findbugs-annotations</artifactId>
@@ -115,6 +111,10 @@
 <groupId>org.apache.hbase</groupId>
 <artifactId>hbase-metrics-api</artifactId>
 </dependency>
+<dependency>
+<groupId>org.apache.commons</groupId>
+<artifactId>commons-collections4</artifactId>
+</dependency>
 </dependencies>

 <profiles>
@@ -36,7 +36,7 @@ import java.util.concurrent.atomic.AtomicReference;
 import java.util.concurrent.locks.Condition;
 import java.util.concurrent.locks.ReentrantLock;

-import org.apache.commons.collections.buffer.CircularFifoBuffer;
+import org.apache.commons.collections4.queue.CircularFifoQueue;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -155,7 +155,7 @@ public class WALProcedureStore extends ProcedureStoreBase {
 private int syncWaitMsec;

 // Variables used for UI display
-private CircularFifoBuffer syncMetricsBuffer;
+private CircularFifoQueue syncMetricsQueue;

 public static class SyncMetrics {
 private long timestamp;
@@ -229,7 +229,7 @@ public class WALProcedureStore extends ProcedureStoreBase {
 useHsync = conf.getBoolean(USE_HSYNC_CONF_KEY, DEFAULT_USE_HSYNC);

 // WebUI
-syncMetricsBuffer = new CircularFifoBuffer(
+syncMetricsQueue = new CircularFifoQueue(
 conf.getInt(STORE_WAL_SYNC_STATS_COUNT, DEFAULT_SYNC_STATS_COUNT));

 // Init sync thread
@@ -777,7 +777,7 @@ public class WALProcedureStore extends ProcedureStoreBase {
 syncMetrics.syncedEntries = slotIndex;
 syncMetrics.totalSyncedBytes = totalSyncedToStore;
 syncMetrics.syncedPerSec = syncedPerSec;
-syncMetricsBuffer.add(syncMetrics);
+syncMetricsQueue.add(syncMetrics);

 // sync
 inSync.set(true);
@@ -808,7 +808,7 @@ public class WALProcedureStore extends ProcedureStoreBase {
 public ArrayList<SyncMetrics> getSyncMetrics() {
 lock.lock();
 try {
-return new ArrayList<>(syncMetricsBuffer);
+return new ArrayList<>(syncMetricsQueue);
 } finally {
 lock.unlock();
 }
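Because CircularFifoQueue is a java.util.Collection, the existing snapshot-under-lock pattern in getSyncMetrics keeps working with only the field rename. A hedged sketch of that pattern with illustrative names, not the actual WALProcedureStore code:

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.commons.collections4.queue.CircularFifoQueue;

public class SyncMetricsSnapshotSketch {
  private final ReentrantLock lock = new ReentrantLock();
  private final CircularFifoQueue<String> syncMetricsQueue = new CircularFifoQueue<>(100);

  // Copying into a fresh ArrayList while holding the lock gives callers an
  // isolated snapshot; later evictions from the queue cannot affect it.
  public List<String> getSyncMetrics() {
    lock.lock();
    try {
      return new ArrayList<>(syncMetricsQueue);
    } finally {
      lock.unlock();
    }
  }
}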
@@ -423,8 +423,8 @@
 <artifactId>commons-codec</artifactId>
 </dependency>
 <dependency>
-<groupId>commons-collections</groupId>
-<artifactId>commons-collections</artifactId>
+<groupId>org.apache.commons</groupId>
+<artifactId>commons-collections4</artifactId>
 </dependency>
 <dependency>
 <groupId>org.apache.hbase</groupId>
@@ -30,7 +30,7 @@ import java.util.Set;
 import java.util.concurrent.atomic.AtomicLong;

 import org.apache.hadoop.hbase.shaded.com.google.common.collect.MinMaxPriorityQueue;
-import org.apache.commons.collections.map.LinkedMap;
+import org.apache.commons.collections4.map.LinkedMap;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -27,7 +27,7 @@ import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;

-import org.apache.commons.collections.buffer.CircularFifoBuffer;
+import org.apache.commons.collections4.queue.CircularFifoQueue;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -61,7 +61,7 @@ public class TaskMonitor {
 private final int maxTasks;
 private final long rpcWarnTime;
 private final long expirationTime;
-private final CircularFifoBuffer tasks;
+private final CircularFifoQueue tasks;
 private final List<TaskAndWeakRefPair> rpcTasks;
 private final long monitorInterval;
 private Thread monitorThread;
|
@ -70,7 +70,7 @@ public class TaskMonitor {
|
||||||
maxTasks = conf.getInt(MAX_TASKS_KEY, DEFAULT_MAX_TASKS);
|
maxTasks = conf.getInt(MAX_TASKS_KEY, DEFAULT_MAX_TASKS);
|
||||||
expirationTime = conf.getLong(EXPIRATION_TIME_KEY, DEFAULT_EXPIRATION_TIME);
|
expirationTime = conf.getLong(EXPIRATION_TIME_KEY, DEFAULT_EXPIRATION_TIME);
|
||||||
rpcWarnTime = conf.getLong(RPC_WARN_TIME_KEY, DEFAULT_RPC_WARN_TIME);
|
rpcWarnTime = conf.getLong(RPC_WARN_TIME_KEY, DEFAULT_RPC_WARN_TIME);
|
||||||
tasks = new CircularFifoBuffer(maxTasks);
|
tasks = new CircularFifoQueue(maxTasks);
|
||||||
rpcTasks = Lists.newArrayList();
|
rpcTasks = Lists.newArrayList();
|
||||||
monitorInterval = conf.getLong(MONITOR_INTERVAL_KEY, DEFAULT_MONITOR_INTERVAL);
|
monitorInterval = conf.getLong(MONITOR_INTERVAL_KEY, DEFAULT_MONITOR_INTERVAL);
|
||||||
monitorThread = new Thread(new MonitorRunnable());
|
monitorThread = new Thread(new MonitorRunnable());
|
||||||
|
@@ -349,6 +349,6 @@ public class TaskMonitor {
 * @param task monitored task
 * @return false if a task is accepted, true if it is filtered
 */
-boolean filter(MonitoredTask t);
+boolean filter(MonitoredTask task);
 }
 }
@@ -1,4 +1,4 @@
-/*
+/**
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
|
@ -29,8 +29,8 @@ import java.util.concurrent.ConcurrentHashMap;
|
||||||
import java.util.concurrent.ConcurrentMap;
|
import java.util.concurrent.ConcurrentMap;
|
||||||
import java.util.regex.Matcher;
|
import java.util.regex.Matcher;
|
||||||
|
|
||||||
import org.apache.commons.collections.map.AbstractReferenceMap;
|
import org.apache.commons.collections4.map.AbstractReferenceMap;
|
||||||
import org.apache.commons.collections.map.ReferenceMap;
|
import org.apache.commons.collections4.map.ReferenceMap;
|
||||||
import org.apache.commons.lang3.ClassUtils;
|
import org.apache.commons.lang3.ClassUtils;
|
||||||
import org.apache.commons.logging.Log;
|
import org.apache.commons.logging.Log;
|
||||||
import org.apache.commons.logging.LogFactory;
|
import org.apache.commons.logging.LogFactory;
|
||||||
|
@ -43,7 +43,6 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
|
||||||
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
|
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
|
||||||
import org.apache.hadoop.hbase.HConstants;
|
import org.apache.hadoop.hbase.HConstants;
|
||||||
import org.apache.hadoop.hbase.HRegionInfo;
|
import org.apache.hadoop.hbase.HRegionInfo;
|
||||||
import org.apache.hadoop.hbase.HTableDescriptor;
|
|
||||||
import org.apache.hadoop.hbase.classification.InterfaceAudience;
|
import org.apache.hadoop.hbase.classification.InterfaceAudience;
|
||||||
import org.apache.hadoop.hbase.classification.InterfaceStability;
|
import org.apache.hadoop.hbase.classification.InterfaceStability;
|
||||||
import org.apache.hadoop.hbase.client.Append;
|
import org.apache.hadoop.hbase.client.Append;
|
||||||
|
@@ -98,7 +97,8 @@ public class RegionCoprocessorHost
 private static final Log LOG = LogFactory.getLog(RegionCoprocessorHost.class);
 // The shared data map
 private static ReferenceMap sharedDataMap =
-new ReferenceMap(AbstractReferenceMap.HARD, AbstractReferenceMap.WEAK);
+new ReferenceMap(AbstractReferenceMap.ReferenceStrength.HARD,
+AbstractReferenceMap.ReferenceStrength.WEAK);

 // optimization: no need to call postScannerFilterRow, if no coprocessor implements it
 private final boolean hasCustomPostScannerFilterRow;
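collections4 replaced the int constants AbstractReferenceMap.HARD/WEAK with the AbstractReferenceMap.ReferenceStrength enum, which is why the constructor call above changes shape. A minimal sketch of the new form, assuming collections4 4.1; the map contents are illustrative:

import java.util.Map;
import org.apache.commons.collections4.map.AbstractReferenceMap.ReferenceStrength;
import org.apache.commons.collections4.map.ReferenceMap;

public class ReferenceMapSketch {
  public static void main(String[] args) {
    // Hard references to keys, weak references to values: an entry becomes
    // eligible for removal once its value is no longer strongly reachable.
    Map<String, Object> sharedData =
        new ReferenceMap<>(ReferenceStrength.HARD, ReferenceStrength.WEAK);
    sharedData.put("coprocessor-state", new Object());
    System.out.println(sharedData.size());
  }
}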
@@ -235,8 +235,8 @@
 <artifactId>commons-lang3</artifactId>
 </dependency>
 <dependency>
-<groupId>commons-collections</groupId>
-<artifactId>commons-collections</artifactId>
+<groupId>org.apache.commons</groupId>
+<artifactId>commons-collections4</artifactId>
 </dependency>
 <dependency>
 <groupId>org.apache.hbase</groupId>
@@ -26,7 +26,7 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;

-import org.apache.commons.collections.MapUtils;
+import org.apache.commons.collections4.MapUtils;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.ServerName;
@@ -55,7 +55,6 @@ import org.apache.hadoop.hbase.thrift2.generated.TColumnIncrement;
 import org.apache.hadoop.hbase.thrift2.generated.TColumnValue;
 import org.apache.hadoop.hbase.thrift2.generated.TCompareOp;
 import org.apache.hadoop.hbase.thrift2.generated.TDelete;
-import org.apache.hadoop.hbase.thrift2.generated.TDeleteType;
 import org.apache.hadoop.hbase.thrift2.generated.TDurability;
 import org.apache.hadoop.hbase.thrift2.generated.TGet;
 import org.apache.hadoop.hbase.thrift2.generated.THRegionInfo;
@@ -1,4 +1,4 @@
-/*
+/**
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
@@ -19,7 +19,7 @@
 package org.apache.hadoop.hbase.thrift2;

 import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.collections4.CollectionUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
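For many of the files touched by this commit, including the thrift2 class in the hunk above, only the import package changes and the call sites are left alone, since the collections4 helpers used here keep the same names. A tiny hedged illustration; the list is made up and not code from this file:

import java.util.Collections;
import java.util.List;
import org.apache.commons.collections4.CollectionUtils;

public class CollectionUtilsSketch {
  public static void main(String[] args) {
    List<String> columns = Collections.emptyList();
    // Same static helper as in commons-collections 3.2.2; only the package
    // root moved from ...collections to ...collections4.
    System.out.println(CollectionUtils.isEmpty(columns));  // true
  }
}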
@@ -31,11 +31,8 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Increment;
 import org.apache.hadoop.hbase.client.Delete;
pom.xml
@@ -1393,7 +1393,7 @@
 <commons-math.version>2.2</commons-math.version>
 <disruptor.version>3.3.6</disruptor.version>
 <!-- Do not use versions earlier than 3.2.2 due to a security vulnerability -->
-<collections.version>3.2.2</collections.version>
+<collections.version>4.1</collections.version>
 <httpclient.version>4.5.3</httpclient.version>
 <httpcore.version>4.4.6</httpcore.version>
 <metrics-core.version>3.2.1</metrics-core.version>
@@ -1774,8 +1774,8 @@
 <version>${metrics-core.version}</version>
 </dependency>
 <dependency>
-<groupId>commons-collections</groupId>
-<artifactId>commons-collections</artifactId>
+<groupId>org.apache.commons</groupId>
+<artifactId>commons-collections4</artifactId>
 <version>${collections.version}</version>
 </dependency>
 <dependency>