HBASE-18704 Upgrade hbase to commons-collections 4

Upgrade commons-collections:3.2.2 to commons-collections4:4.1
Add missing dependency for hbase-procedure, hbase-thrift
Replace CircularFifoBuffer with CircularFifoQueue in WALProcedureStore and TaskMonitor

Signed-off-by: Sean Busbey <busbey@apache.org>
Signed-off-by: Chia-Ping Tsai <chia7712@gmail.com>
(cherry picked from commit 137b105c67)
This commit is contained in:
Peter Somogyi 2017-08-28 12:04:12 +02:00 committed by Sean Busbey
parent 743f3ae221
commit 33711fd481
12 changed files with 42 additions and 46 deletions

View File

@@ -238,8 +238,8 @@
<scope>compile</scope>
</dependency>
<dependency>
<groupId>commons-collections</groupId>
<artifactId>commons-collections</artifactId>
<groupId>org.apache.commons</groupId>
<artifactId>commons-collections4</artifactId>
<scope>compile</scope>
</dependency>
<dependency>

View File

@@ -28,7 +28,7 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.commons.collections.iterators.UnmodifiableIterator;
import org.apache.commons.collections4.iterators.UnmodifiableIterator;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.util.Bytes;
@@ -388,7 +388,7 @@ public class CompoundConfiguration extends Configuration {
}
}
return UnmodifiableIterator.decorate(ret.entrySet().iterator());
return UnmodifiableIterator.unmodifiableIterator(ret.entrySet().iterator());
}
@Override
@@ -419,4 +419,4 @@ public class CompoundConfiguration extends Configuration {
public void writeXml(OutputStream out) throws IOException {
throw new UnsupportedOperationException("Immutable Configuration");
}
};
}

View File

@@ -63,15 +63,15 @@
<version>${project.version}</version>
<classifier>tests</classifier>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-annotations</artifactId>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-annotations</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-annotations</artifactId>
<type>test-jar</type>
<scope>test</scope>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-annotations</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hbase.thirdparty</groupId>
@@ -97,10 +97,6 @@
<groupId>commons-cli</groupId>
<artifactId>commons-cli</artifactId>
</dependency>
<dependency>
<groupId>commons-collections</groupId>
<artifactId>commons-collections</artifactId>
</dependency>
<dependency>
<groupId>com.github.stephenc.findbugs</groupId>
<artifactId>findbugs-annotations</artifactId>
@@ -115,6 +111,10 @@
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-metrics-api</artifactId>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-collections4</artifactId>
</dependency>
</dependencies>
<profiles>

View File

@@ -36,7 +36,7 @@ import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.commons.collections.buffer.CircularFifoBuffer;
import org.apache.commons.collections4.queue.CircularFifoQueue;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
@@ -155,7 +155,7 @@ public class WALProcedureStore extends ProcedureStoreBase {
private int syncWaitMsec;
// Variables used for UI display
private CircularFifoBuffer syncMetricsBuffer;
private CircularFifoQueue syncMetricsQueue;
public static class SyncMetrics {
private long timestamp;
@@ -229,7 +229,7 @@ public class WALProcedureStore extends ProcedureStoreBase {
useHsync = conf.getBoolean(USE_HSYNC_CONF_KEY, DEFAULT_USE_HSYNC);
// WebUI
syncMetricsBuffer = new CircularFifoBuffer(
syncMetricsQueue = new CircularFifoQueue(
conf.getInt(STORE_WAL_SYNC_STATS_COUNT, DEFAULT_SYNC_STATS_COUNT));
// Init sync thread
@@ -777,7 +777,7 @@ public class WALProcedureStore extends ProcedureStoreBase {
syncMetrics.syncedEntries = slotIndex;
syncMetrics.totalSyncedBytes = totalSyncedToStore;
syncMetrics.syncedPerSec = syncedPerSec;
syncMetricsBuffer.add(syncMetrics);
syncMetricsQueue.add(syncMetrics);
// sync
inSync.set(true);
@@ -808,7 +808,7 @@ public class WALProcedureStore extends ProcedureStoreBase {
public ArrayList<SyncMetrics> getSyncMetrics() {
lock.lock();
try {
return new ArrayList<>(syncMetricsBuffer);
return new ArrayList<>(syncMetricsQueue);
} finally {
lock.unlock();
}

View File

@@ -423,8 +423,8 @@
<artifactId>commons-codec</artifactId>
</dependency>
<dependency>
<groupId>commons-collections</groupId>
<artifactId>commons-collections</artifactId>
<groupId>org.apache.commons</groupId>
<artifactId>commons-collections4</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>

View File

@@ -30,7 +30,7 @@ import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.MinMaxPriorityQueue;
import org.apache.commons.collections.map.LinkedMap;
import org.apache.commons.collections4.map.LinkedMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.classification.InterfaceAudience;

View File

@@ -27,7 +27,7 @@ import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.collections.buffer.CircularFifoBuffer;
import org.apache.commons.collections4.queue.CircularFifoQueue;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
@@ -61,7 +61,7 @@ public class TaskMonitor {
private final int maxTasks;
private final long rpcWarnTime;
private final long expirationTime;
private final CircularFifoBuffer tasks;
private final CircularFifoQueue tasks;
private final List<TaskAndWeakRefPair> rpcTasks;
private final long monitorInterval;
private Thread monitorThread;
@@ -70,7 +70,7 @@ public class TaskMonitor {
maxTasks = conf.getInt(MAX_TASKS_KEY, DEFAULT_MAX_TASKS);
expirationTime = conf.getLong(EXPIRATION_TIME_KEY, DEFAULT_EXPIRATION_TIME);
rpcWarnTime = conf.getLong(RPC_WARN_TIME_KEY, DEFAULT_RPC_WARN_TIME);
tasks = new CircularFifoBuffer(maxTasks);
tasks = new CircularFifoQueue(maxTasks);
rpcTasks = Lists.newArrayList();
monitorInterval = conf.getLong(MONITOR_INTERVAL_KEY, DEFAULT_MONITOR_INTERVAL);
monitorThread = new Thread(new MonitorRunnable());
@@ -349,6 +349,6 @@ public class TaskMonitor {
* @param task monitored task
* @return false if a task is accepted, true if it is filtered
*/
boolean filter(MonitoredTask t);
boolean filter(MonitoredTask task);
}
}

View File

@@ -1,4 +1,4 @@
/*
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
@@ -29,8 +29,8 @@ import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.regex.Matcher;
import org.apache.commons.collections.map.AbstractReferenceMap;
import org.apache.commons.collections.map.ReferenceMap;
import org.apache.commons.collections4.map.AbstractReferenceMap;
import org.apache.commons.collections4.map.ReferenceMap;
import org.apache.commons.lang3.ClassUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -43,7 +43,6 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.hbase.client.Append;
@@ -98,7 +97,8 @@ public class RegionCoprocessorHost
private static final Log LOG = LogFactory.getLog(RegionCoprocessorHost.class);
// The shared data map
private static ReferenceMap sharedDataMap =
new ReferenceMap(AbstractReferenceMap.HARD, AbstractReferenceMap.WEAK);
new ReferenceMap(AbstractReferenceMap.ReferenceStrength.HARD,
AbstractReferenceMap.ReferenceStrength.WEAK);
// optimization: no need to call postScannerFilterRow, if no coprocessor implements it
private final boolean hasCustomPostScannerFilterRow;

View File

@@ -235,8 +235,8 @@
<artifactId>commons-lang3</artifactId>
</dependency>
<dependency>
<groupId>commons-collections</groupId>
<artifactId>commons-collections</artifactId>
<groupId>org.apache.commons</groupId>
<artifactId>commons-collections4</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>

View File

@@ -26,7 +26,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.collections4.MapUtils;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.ServerName;
@@ -55,7 +55,6 @@ import org.apache.hadoop.hbase.thrift2.generated.TColumnIncrement;
import org.apache.hadoop.hbase.thrift2.generated.TColumnValue;
import org.apache.hadoop.hbase.thrift2.generated.TCompareOp;
import org.apache.hadoop.hbase.thrift2.generated.TDelete;
import org.apache.hadoop.hbase.thrift2.generated.TDeleteType;
import org.apache.hadoop.hbase.thrift2.generated.TDurability;
import org.apache.hadoop.hbase.thrift2.generated.TGet;
import org.apache.hadoop.hbase.thrift2.generated.THRegionInfo;

View File

@@ -1,4 +1,4 @@
/*
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
@@ -19,7 +19,7 @@
package org.apache.hadoop.hbase.thrift2;
import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
@@ -31,11 +31,8 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Delete;

View File

@@ -1355,7 +1355,7 @@
<commons-math.version>2.2</commons-math.version>
<disruptor.version>3.3.6</disruptor.version>
<!-- Do not use versions earlier than 3.2.2 due to a security vulnerability -->
<collections.version>3.2.2</collections.version>
<collections.version>4.1</collections.version>
<httpclient.version>4.5.3</httpclient.version>
<httpcore.version>4.4.6</httpcore.version>
<metrics-core.version>3.2.1</metrics-core.version>
@@ -1729,8 +1729,8 @@
<version>${metrics-core.version}</version>
</dependency>
<dependency>
<groupId>commons-collections</groupId>
<artifactId>commons-collections</artifactId>
<groupId>org.apache.commons</groupId>
<artifactId>commons-collections4</artifactId>
<version>${collections.version}</version>
</dependency>
<dependency>