HBASE-18674 upgrading to commons-lang3

Umesh Agashe 2017-08-24 11:10:17 -07:00 committed by Michael Stack
parent a2b110e0fa
commit 5dacc85122
149 changed files with 289 additions and 277 deletions
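The change is mechanical but tree-wide: the Maven coordinates move from commons-lang:commons-lang to org.apache.commons:commons-lang3, and every import moves from the org.apache.commons.lang package tree to org.apache.commons.lang3. A minimal sketch of what the rename means at a call site (class names such as StringUtils are unchanged between the two lines for the utilities this patch touches):

import org.apache.commons.lang3.StringUtils; // was: org.apache.commons.lang.StringUtils

public class ImportRenameSketch {
  public static void main(String[] args) {
    // isBlank behaves identically under commons-lang 2.x and commons-lang3.
    System.out.println(StringUtils.isBlank("   ")); // prints: true
  }
}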


@@ -87,7 +87,7 @@ RUN apt-get -q update && apt-get -q install --no-install-recommends -y \
 # Fixing the Apache commons / Maven dependency problem under Ubuntu:
 # See http://wiki.apache.org/commons/VfsProblems
-RUN cd /usr/share/maven/lib && ln -s ../../java/commons-lang.jar .
+RUN cd /usr/share/maven/lib && ln -s ../../java/commons-lang3-3.6.jar .
 ######
 # Install findbugs


@@ -140,8 +140,8 @@
   <artifactId>commons-io</artifactId>
 </dependency>
 <dependency>
-  <groupId>commons-lang</groupId>
-  <artifactId>commons-lang</artifactId>
+  <groupId>org.apache.commons</groupId>
+  <artifactId>commons-lang3</artifactId>
 </dependency>
 <dependency>
   <groupId>commons-logging</groupId>


@@ -29,7 +29,7 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.TableName;


@@ -36,7 +36,7 @@ import java.util.List;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;


@@ -26,7 +26,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;


@@ -40,7 +40,7 @@ import java.util.List;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Options;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileSystem;


@@ -31,7 +31,7 @@ import java.util.Set;
 import java.util.TreeMap;
 import java.util.TreeSet;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;


@@ -29,7 +29,7 @@ import java.util.Map;
 import java.util.Set;
 import java.util.TreeMap;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileSystem;


@@ -29,7 +29,7 @@ import java.util.Map;
 import java.util.Set;
 import java.util.TreeSet;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;


@@ -25,7 +25,7 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;


@@ -22,7 +22,7 @@ import static org.apache.hadoop.hbase.backup.util.BackupUtils.succeeded;
 import java.io.IOException;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;


@@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.backup.util;
 import java.util.List;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;


@@ -17,7 +17,7 @@ import static org.junit.Assert.assertTrue;
 import java.io.IOException;
 import java.util.List;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.TableName;
@@ -342,4 +342,4 @@ public class TestFullRestore extends TestBackupBase {
     int ret = ToolRunner.run(conf1, new RestoreDriver(), args);
     assertTrue(ret != 0);
   }
-}
+}


@@ -26,7 +26,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;


@@ -144,8 +144,8 @@
   <artifactId>commons-io</artifactId>
 </dependency>
 <dependency>
-  <groupId>commons-lang</groupId>
-  <artifactId>commons-lang</artifactId>
+  <groupId>org.apache.commons</groupId>
+  <artifactId>commons-lang3</artifactId>
 </dependency>
 <dependency>
   <groupId>commons-logging</groupId>


@@ -29,7 +29,7 @@ import java.util.LinkedList;
 import java.util.Queue;
 import java.util.concurrent.ExecutorService;
-import org.apache.commons.lang.mutable.MutableBoolean;
+import org.apache.commons.lang3.mutable.MutableBoolean;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
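Note: the mutable wrappers moved wholesale from org.apache.commons.lang.mutable to org.apache.commons.lang3.mutable with their API intact, so files like this one need only the import swap. A minimal sketch, with a hypothetical flag name:

import org.apache.commons.lang3.mutable.MutableBoolean;

public class MutableBooleanSketch {
  public static void main(String[] args) {
    MutableBoolean retryFlag = new MutableBoolean(false); // hypothetical example flag
    retryFlag.setValue(true);
    System.out.println(retryFlag.isTrue()); // prints: true
  }
}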


@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hbase.client;
-import org.apache.commons.lang.mutable.MutableBoolean;
+import org.apache.commons.lang3.mutable.MutableBoolean;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -129,4 +129,4 @@ class FastFailInterceptorContext extends RetryingCallerInterceptorContext {
     this.tries = tries;
     return this;
   }
-}
+}


@@ -26,7 +26,7 @@ import java.util.Map.Entry;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
-import org.apache.commons.lang.mutable.MutableBoolean;
+import org.apache.commons.lang3.mutable.MutableBoolean;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;


@@ -29,7 +29,7 @@ import static org.apache.hadoop.hbase.zookeeper.RecoverableZooKeeper.removeMetaData
 import java.io.IOException;
 import java.util.concurrent.CompletableFuture;
-import org.apache.commons.lang.mutable.MutableInt;
+import org.apache.commons.lang3.mutable.MutableInt;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.curator.framework.CuratorFramework;
@@ -248,4 +248,4 @@ class ZKAsyncRegistry implements AsyncRegistry {
   public void close() {
     zk.close();
   }
-}
+}


@@ -18,7 +18,7 @@
  */
 package org.apache.hadoop.hbase.client.replication;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
 import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations;
 import org.apache.commons.logging.Log;


@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.client.replication;
 import java.util.Map;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;


@@ -25,7 +25,7 @@ import org.apache.hadoop.hbase.shaded.io.netty.channel.EventLoopGroup;
 import java.util.HashMap;
 import java.util.Map;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.util.Pair;


@@ -18,7 +18,7 @@ package org.apache.hadoop.hbase.quotas;
 import java.util.Objects;
-import org.apache.commons.lang.builder.HashCodeBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos;


@@ -35,7 +35,7 @@ import java.util.Map;
 import javax.security.auth.login.AppConfigurationEntry;
 import javax.security.auth.login.AppConfigurationEntry.LoginModuleControlFlag;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;


@@ -34,7 +34,7 @@ import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
 import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
-import org.apache.commons.lang.NotImplementedException;
+import org.apache.commons.lang3.NotImplementedException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -461,7 +461,7 @@ public class TestClientNoCluster extends Configured implements Tool {
     @Override
     public MutateResponse mutate(RpcController controller,
         MutateRequest request) throws ServiceException {
-      throw new NotImplementedException();
+      throw new NotImplementedException(HConstants.NOT_IMPLEMENTED);
     }
     @Override
@@ -476,14 +476,14 @@ public class TestClientNoCluster extends Configured implements Tool {
     public BulkLoadHFileResponse bulkLoadHFile(
         RpcController controller, BulkLoadHFileRequest request)
         throws ServiceException {
-      throw new NotImplementedException();
+      throw new NotImplementedException(HConstants.NOT_IMPLEMENTED);
     }
     @Override
     public CoprocessorServiceResponse execService(
         RpcController controller, CoprocessorServiceRequest request)
         throws ServiceException {
-      throw new NotImplementedException();
+      throw new NotImplementedException(HConstants.NOT_IMPLEMENTED);
     }
     @Override
@@ -505,19 +505,19 @@ public class TestClientNoCluster extends Configured implements Tool {
     @Override
     public CoprocessorServiceResponse execRegionServerService(RpcController controller,
         CoprocessorServiceRequest request) throws ServiceException {
-      throw new NotImplementedException();
+      throw new NotImplementedException(HConstants.NOT_IMPLEMENTED);
     }
     @Override
     public PrepareBulkLoadResponse prepareBulkLoad(RpcController controller,
         PrepareBulkLoadRequest request) throws ServiceException {
-      throw new NotImplementedException();
+      throw new NotImplementedException(HConstants.NOT_IMPLEMENTED);
    }
    @Override
    public CleanupBulkLoadResponse cleanupBulkLoad(RpcController controller,
        CleanupBulkLoadRequest request) throws ServiceException {
-      throw new NotImplementedException();
+      throw new NotImplementedException(HConstants.NOT_IMPLEMENTED);
     }
   }
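Note: unlike its commons-lang 2.x namesake, org.apache.commons.lang3.NotImplementedException (as of the 3.6 line used here) offers no zero-argument constructor, which is why these stubs now pass the new HConstants.NOT_IMPLEMENTED message. A minimal sketch of the pattern; the local constant stands in for HConstants.NOT_IMPLEMENTED:

import org.apache.commons.lang3.NotImplementedException;

public class NotImplementedSketch {
  // Stands in for HConstants.NOT_IMPLEMENTED, defined later in this commit.
  static final String NOT_IMPLEMENTED = "Not implemented";

  static void stubbedRpc() {
    // lang3 requires a message (or cause); a bare new NotImplementedException() no longer compiles.
    throw new NotImplementedException(NOT_IMPLEMENTED);
  }

  public static void main(String[] args) {
    try {
      stubbedRpc();
    } catch (NotImplementedException e) {
      System.out.println(e.getMessage()); // prints: Not implemented
    }
  }
}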


@@ -23,7 +23,7 @@ import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.Arrays;
-import org.apache.commons.lang.time.StopWatch;
+import org.apache.commons.lang3.time.StopWatch;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.commons.logging.impl.Log4JLogger;


@@ -233,8 +233,8 @@
   <scope>compile</scope>
 </dependency>
 <dependency>
-  <groupId>commons-lang</groupId>
-  <artifactId>commons-lang</artifactId>
+  <groupId>org.apache.commons</groupId>
+  <artifactId>commons-lang3</artifactId>
   <scope>compile</scope>
 </dependency>
 <dependency>


@@ -27,7 +27,7 @@ import java.util.List;
 import java.util.UUID;
 import java.util.regex.Pattern;
-import org.apache.commons.lang.ArrayUtils;
+import org.apache.commons.lang3.ArrayUtils;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -1364,6 +1364,8 @@ public final class HConstants {
   public static final String DEFAULT_SNAPSHOT_RESTORE_FAILSAFE_NAME =
       "hbase-failsafe-{snapshot.name}-{restore.timestamp}";
 
+  public static final String NOT_IMPLEMENTED = "Not implemented";
+
   private HConstants() {
     // Can't be instantiated with this ctor.
   }


@@ -25,7 +25,7 @@ import java.io.OutputStream;
 import java.nio.ByteBuffer;
 import java.util.Iterator;
-import org.apache.commons.lang.NotImplementedException;
+import org.apache.commons.lang3.NotImplementedException;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.KeyValue;


@@ -21,7 +21,8 @@ package org.apache.hadoop.hbase.util;
 import java.util.Iterator;
-import org.apache.commons.lang.NotImplementedException;
+import org.apache.commons.lang3.NotImplementedException;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 /**
@@ -106,7 +107,7 @@ public class PairOfSameType<T> implements Iterable<T> {
       @Override
       public void remove() {
-        throw new NotImplementedException();
+        throw new NotImplementedException(HConstants.NOT_IMPLEMENTED);
       }
     };
   }


@@ -181,8 +181,8 @@ limitations under the License.
   <version>${hadoop-two.version}</version>
 </dependency>
 <dependency>
-  <groupId>commons-lang</groupId>
-  <artifactId>commons-lang</artifactId>
+  <groupId>org.apache.commons</groupId>
+  <artifactId>commons-lang3</artifactId>
 </dependency>
 <dependency>
   <groupId>commons-logging</groupId>


@@ -34,7 +34,7 @@ package org.apache.hadoop.hbase.metrics.impl;
 import java.util.Map;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.metrics.Counter;


@@ -18,7 +18,7 @@
 package org.apache.hadoop.metrics2.lib;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.metrics.Histogram;
 import org.apache.hadoop.hbase.metrics.Interns;


@@ -267,8 +267,8 @@
   <artifactId>commons-math</artifactId>
 </dependency>
 <dependency>
-  <groupId>commons-lang</groupId>
-  <artifactId>commons-lang</artifactId>
+  <groupId>org.apache.commons</groupId>
+  <artifactId>commons-lang3</artifactId>
 </dependency>
 <dependency>
   <groupId>org.apache.htrace</groupId>


@@ -286,13 +286,13 @@ public class DistributedHBaseCluster extends HBaseCluster {
   @Override
   public void waitUntilShutDown() {
     // Simply wait for a few seconds for now (after issuing serverManager.kill
-    throw new RuntimeException("Not implemented yet");
+    throw new RuntimeException(HConstants.NOT_IMPLEMENTED);
   }
   @Override
   public void shutdown() throws IOException {
     // not sure we want this
-    throw new RuntimeException("Not implemented yet");
+    throw new RuntimeException(HConstants.NOT_IMPLEMENTED);
   }
   @Override


@@ -23,7 +23,7 @@ import java.io.IOException;
 import java.util.Locale;
 import java.util.Map;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;


@@ -24,7 +24,7 @@ import java.util.Properties;
 import java.util.Set;
 import org.apache.commons.cli.CommandLine;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;


@@ -25,8 +25,8 @@ import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.commons.lang.RandomStringUtils;
-import org.apache.commons.lang.math.RandomUtils;
+import org.apache.commons.lang3.RandomStringUtils;
+import org.apache.commons.lang3.RandomUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -257,7 +257,7 @@ public class IntegrationTestDDLMasterFailover extends IntegrationTestBase {
         return null;
       }
       ArrayList<String> namespaceList = new ArrayList<>(namespaceMap.keySet());
-      String randomKey = namespaceList.get(RandomUtils.nextInt(namespaceList.size()));
+      String randomKey = namespaceList.get(RandomUtils.nextInt(0, namespaceList.size()));
       NamespaceDescriptor randomNsd = namespaceMap.get(randomKey);
       // remove from namespaceMap
       namespaceMap.remove(randomKey);
@@ -306,12 +306,12 @@ public class IntegrationTestDDLMasterFailover extends IntegrationTestBase {
     private NamespaceDescriptor createNamespaceDesc() {
       String namespaceName = "itnamespace" + String.format("%010d",
-          RandomUtils.nextInt(Integer.MAX_VALUE));
+          RandomUtils.nextInt());
       NamespaceDescriptor nsd = NamespaceDescriptor.create(namespaceName).build();
       nsd.setConfiguration(
           nsTestConfigKey,
-          String.format("%010d", RandomUtils.nextInt(Integer.MAX_VALUE)));
+          String.format("%010d", RandomUtils.nextInt()));
       return nsd;
     }
   }
@@ -331,7 +331,7 @@ public class IntegrationTestDDLMasterFailover extends IntegrationTestBase {
       NamespaceDescriptor modifiedNsd = NamespaceDescriptor.create(namespaceName).build();
       String nsValueNew;
       do {
-        nsValueNew = String.format("%010d", RandomUtils.nextInt(Integer.MAX_VALUE));
+        nsValueNew = String.format("%010d", RandomUtils.nextInt());
       } while (selected.getConfigurationValue(nsTestConfigKey).equals(nsValueNew));
       modifiedNsd.setConfiguration(nsTestConfigKey, nsValueNew);
       admin.modifyNamespace(modifiedNsd);
@@ -397,7 +397,7 @@ public class IntegrationTestDDLMasterFailover extends IntegrationTestBase {
         return null;
       }
       ArrayList<TableName> tableList = new ArrayList<>(tableMap.keySet());
-      TableName randomKey = tableList.get(RandomUtils.nextInt(tableList.size()));
+      TableName randomKey = tableList.get(RandomUtils.nextInt(0, tableList.size()));
       TableDescriptor randomTd = tableMap.remove(randomKey);
       return randomTd;
     }
@@ -436,7 +436,7 @@ public class IntegrationTestDDLMasterFailover extends IntegrationTestBase {
     }
     private TableDescriptor createTableDesc() {
-      String tableName = String.format("ittable-%010d", RandomUtils.nextInt(Integer.MAX_VALUE));
+      String tableName = String.format("ittable-%010d", RandomUtils.nextInt());
       String familyName = "cf-" + Math.abs(RandomUtils.nextInt());
       return TableDescriptorBuilder.newBuilder(TableName.valueOf(tableName))
           .addColumnFamily(ColumnFamilyDescriptorBuilder.of(familyName))
@@ -581,7 +581,7 @@ public class IntegrationTestDDLMasterFailover extends IntegrationTestBase {
         LOG.info("No column families in table: " + td);
         return null;
       }
-      ColumnFamilyDescriptor randomCfd = families[RandomUtils.nextInt(families.length)];
+      ColumnFamilyDescriptor randomCfd = families[RandomUtils.nextInt(0, families.length)];
       return randomCfd;
     }
   }
@@ -624,7 +624,7 @@ public class IntegrationTestDDLMasterFailover extends IntegrationTestBase {
     }
     private ColumnFamilyDescriptor createFamilyDesc() {
-      String familyName = String.format("cf-%010d", RandomUtils.nextInt(Integer.MAX_VALUE));
+      String familyName = String.format("cf-%010d", RandomUtils.nextInt());
       return ColumnFamilyDescriptorBuilder.of(familyName);
     }
   }
@@ -643,7 +643,7 @@ public class IntegrationTestDDLMasterFailover extends IntegrationTestBase {
     }
     Admin admin = connection.getAdmin();
-    int versions = RandomUtils.nextInt(10) + 3;
+    int versions = RandomUtils.nextInt(0, 10) + 3;
     try {
       TableName tableName = selected.getTableName();
       LOG.info("Altering versions of column family: " + columnDesc + " to: " + versions +
@@ -698,7 +698,7 @@ public class IntegrationTestDDLMasterFailover extends IntegrationTestBase {
     TableName tableName = selected.getTableName();
     // possible DataBlockEncoding ids
     int[] possibleIds = {0, 2, 3, 4, 6};
-    short id = (short) possibleIds[RandomUtils.nextInt(possibleIds.length)];
+    short id = (short) possibleIds[RandomUtils.nextInt(0, possibleIds.length)];
     LOG.info("Altering encoding of column family: " + columnDesc + " to: " + id +
         " in table: " + tableName);
@@ -789,13 +789,13 @@ public class IntegrationTestDDLMasterFailover extends IntegrationTestBase {
     for (int i = 0; i < numRows; i++){
       // nextInt(Integer.MAX_VALUE)) to return positive numbers only
       byte[] rowKey = Bytes.toBytes(
-          "row-" + String.format("%010d", RandomUtils.nextInt(Integer.MAX_VALUE)));
+          "row-" + String.format("%010d", RandomUtils.nextInt()));
       ColumnFamilyDescriptor cfd = selectFamily(selected);
       if (cfd == null){
         return;
       }
       byte[] family = cfd.getName();
-      byte[] qualifier = Bytes.toBytes("col-" + RandomUtils.nextInt(Integer.MAX_VALUE) % 10);
+      byte[] qualifier = Bytes.toBytes("col-" + RandomUtils.nextInt() % 10);
       byte[] value = Bytes.toBytes("val-" + RandomStringUtils.randomAlphanumeric(10));
       Put put = new Put(rowKey);
       put.addColumn(family, qualifier, value);
@@ -873,7 +873,7 @@ public class IntegrationTestDDLMasterFailover extends IntegrationTestBase {
         break;
       case DELETE_TABLE:
         // reduce probability of deleting table to 20%
-        if (RandomUtils.nextInt(100) < 20) {
+        if (RandomUtils.nextInt(0, 100) < 20) {
          new DeleteTableAction().perform();
         }
         break;
@@ -882,7 +882,7 @@ public class IntegrationTestDDLMasterFailover extends IntegrationTestBase {
         break;
      case DELETE_COLUMNFAMILY:
        // reduce probability of deleting column family to 20%
-        if (RandomUtils.nextInt(100) < 20) {
+        if (RandomUtils.nextInt(0, 100) < 20) {
          new DeleteColumnFamilyAction().perform();
        }
        break;
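Note: the RandomUtils edits above track a signature change, not just a package move. commons-lang 2.x exposed nextInt(n) for a draw from [0, n), while lang3's RandomUtils uses nextInt(startInclusive, endExclusive) and a no-argument nextInt() that already returns a non-negative int, so the old nextInt(Integer.MAX_VALUE) idiom collapses to nextInt(). A minimal sketch of the mapping:

import org.apache.commons.lang3.RandomUtils;

public class RandomUtilsSketch {
  public static void main(String[] args) {
    int bounded = RandomUtils.nextInt(0, 100); // was: RandomUtils.nextInt(100)
    int nonNegative = RandomUtils.nextInt();   // was: RandomUtils.nextInt(Integer.MAX_VALUE)
    System.out.println(bounded + " " + nonNegative);
  }
}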


@@ -22,7 +22,7 @@ import java.util.Arrays;
 import java.util.List;
 import org.apache.commons.cli.CommandLine;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.security.User;


@@ -23,7 +23,7 @@ import java.util.Locale;
 import java.util.Set;
 import org.apache.commons.cli.CommandLine;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;


@@ -26,7 +26,7 @@ import java.util.List;
 import java.util.function.BiConsumer;
 import java.util.function.Consumer;
-import org.apache.commons.lang.math.RandomUtils;
+import org.apache.commons.lang3.RandomUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -208,7 +208,7 @@ public class Action {
     int victimRegionCount = (int)Math.ceil(fractionOfRegions * regions.size());
     LOG.debug("Removing " + victimRegionCount + " regions from " + server.getServerName());
     for (int i = 0; i < victimRegionCount; ++i) {
-      int victimIx = RandomUtils.nextInt(regions.size());
+      int victimIx = RandomUtils.nextInt(0, regions.size());
       String regionId = HRegionInfo.encodeRegionName(regions.remove(victimIx));
       victimRegions.add(Bytes.toBytes(regionId));
     }
@@ -223,7 +223,7 @@
       if (context.isStopping()) {
         break;
       }
-      int targetIx = RandomUtils.nextInt(toServers.size());
+      int targetIx = RandomUtils.nextInt(0, toServers.size());
       admin.move(victimRegion, Bytes.toBytes(toServers.get(targetIx).getServerName()));
     }
   }


@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.chaos.actions;
 import java.io.IOException;
-import org.apache.commons.lang.RandomStringUtils;
+import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;


@@ -18,7 +18,7 @@
 package org.apache.hadoop.hbase.chaos.actions;
-import org.apache.commons.lang.math.RandomUtils;
+import org.apache.commons.lang3.RandomUtils;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
@@ -47,7 +47,7 @@ public class CompactMobAction extends Action {
   public void perform() throws Exception {
     HBaseTestingUtility util = context.getHBaseIntegrationTestingUtility();
     Admin admin = util.getAdmin();
-    boolean major = RandomUtils.nextInt(100) < majorRatio;
+    boolean major = RandomUtils.nextInt(0, 100) < majorRatio;
     // Don't try the modify if we're stopping
     if (context.isStopping()) {


@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.chaos.actions;
 import java.util.List;
-import org.apache.commons.lang.math.RandomUtils;
+import org.apache.commons.lang3.RandomUtils;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.TableName;
@@ -51,7 +51,7 @@ public class CompactRandomRegionOfTableAction extends Action {
   public void perform() throws Exception {
     HBaseTestingUtility util = context.getHBaseIntegrationTestingUtility();
     Admin admin = util.getAdmin();
-    boolean major = RandomUtils.nextInt(100) < majorRatio;
+    boolean major = RandomUtils.nextInt(0, 100) < majorRatio;
     LOG.info("Performing action: Compact random region of table "
         + tableName + ", major=" + major);


@@ -18,7 +18,7 @@
 package org.apache.hadoop.hbase.chaos.actions;
-import org.apache.commons.lang.math.RandomUtils;
+import org.apache.commons.lang3.RandomUtils;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
@@ -46,7 +46,7 @@ public class CompactTableAction extends Action {
   public void perform() throws Exception {
     HBaseTestingUtility util = context.getHBaseIntegrationTestingUtility();
     Admin admin = util.getAdmin();
-    boolean major = RandomUtils.nextInt(100) < majorRatio;
+    boolean major = RandomUtils.nextInt(0, 100) < majorRatio;
     LOG.info("Performing action: Compact table " + tableName + ", major=" + major);
     try {


@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.chaos.actions;
 import java.util.List;
-import org.apache.commons.lang.math.RandomUtils;
+import org.apache.commons.lang3.RandomUtils;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.TableName;
@@ -54,7 +54,7 @@ public class MergeRandomAdjacentRegionsOfTableAction extends Action {
       return;
     }
-    int i = RandomUtils.nextInt(regions.size() - 1);
+    int i = RandomUtils.nextInt(0, regions.size() - 1);
     HRegionInfo a = regions.get(i++);
     HRegionInfo b = regions.get(i);
     LOG.debug("Merging " + a.getRegionNameAsString() + " and " + b.getRegionNameAsString());


@@ -22,7 +22,7 @@ import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
-import org.apache.commons.lang.math.RandomUtils;
+import org.apache.commons.lang3.RandomUtils;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
@@ -77,7 +77,7 @@ public class MoveRegionsOfTableAction extends Action {
      try {
        String destServerName =
-           servers[RandomUtils.nextInt(servers.length)].getServerName();
+           servers[RandomUtils.nextInt(0, servers.length)].getServerName();
        LOG.debug("Moving " + regionInfo.getRegionNameAsString() + " to " + destServerName);
        admin.move(regionInfo.getEncodedNameAsBytes(), Bytes.toBytes(destServerName));
      } catch (Exception ex) {


@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.chaos.actions;
 import java.util.List;
-import org.apache.commons.lang.math.RandomUtils;
+import org.apache.commons.lang3.RandomUtils;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.client.RegionLocator;
@@ -41,6 +41,6 @@ public class RestartRsHoldingTableAction extends RestartActionBaseAction {
     LOG.info("Performing action: Restart random RS holding table " + this.locator.getName());
     List<HRegionLocation> locations = locator.getAllRegionLocations();
-    restartRs(locations.get(RandomUtils.nextInt(locations.size())).getServerName(), sleepTime);
+    restartRs(locations.get(RandomUtils.nextInt(0, locations.size())).getServerName(), sleepTime);
   }
 }


@@ -24,7 +24,7 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.Queue;
-import org.apache.commons.lang.math.RandomUtils;
+import org.apache.commons.lang3.RandomUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.ServerName;
@@ -104,7 +104,7 @@ public class RollingBatchRestartRsAction extends BatchRestartRsAction {
         break;
       }
-      sleep(RandomUtils.nextInt((int)sleepTime));
+      sleep(RandomUtils.nextInt(0, (int)sleepTime));
     }
   }
@@ -149,4 +149,4 @@
     action.perform();
   }
-}
+}


@@ -24,7 +24,7 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.Set;
-import org.apache.commons.lang.math.RandomUtils;
+import org.apache.commons.lang3.RandomUtils;
 import org.apache.hadoop.hbase.ClusterStatus;
 import org.apache.hadoop.hbase.ServerName;
 import org.junit.Assert;
@@ -65,7 +65,7 @@ public class UnbalanceKillAndRebalanceAction extends Action {
         liveCount + deadCount < victimServers.size());
     List<ServerName> targetServers = new ArrayList<>(liveCount);
     for (int i = 0; i < liveCount + deadCount; ++i) {
-      int victimIx = RandomUtils.nextInt(victimServers.size());
+      int victimIx = RandomUtils.nextInt(0, victimServers.size());
       targetServers.add(victimServers.remove(victimIx));
     }
     unbalanceRegions(status, victimServers, targetServers, HOARD_FRC_OF_REGIONS);


@@ -22,7 +22,7 @@ import java.util.ArrayList;
 import java.util.LinkedList;
 import java.util.List;
-import org.apache.commons.lang.math.RandomUtils;
+import org.apache.commons.lang3.RandomUtils;
 import org.apache.hadoop.hbase.ClusterStatus;
 import org.apache.hadoop.hbase.ServerName;
@@ -52,7 +52,7 @@ public class UnbalanceRegionsAction extends Action {
     int targetServerCount = (int)Math.ceil(fractionOfServers * victimServers.size());
     List<ServerName> targetServers = new ArrayList<>(targetServerCount);
     for (int i = 0; i < targetServerCount; ++i) {
-      int victimIx = RandomUtils.nextInt(victimServers.size());
+      int victimIx = RandomUtils.nextInt(0, victimServers.size());
       targetServers.add(victimServers.remove(victimIx));
     }
     unbalanceRegions(status, victimServers, targetServers, fractionOfRegions);


@@ -22,7 +22,7 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
-import org.apache.commons.lang.math.RandomUtils;
+import org.apache.commons.lang3.RandomUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.IntegrationTestingUtility;
@@ -61,7 +61,7 @@ public class PolicyBasedChaosMonkey extends ChaosMonkey {
   /** Selects a random item from the given items */
   public static <T> T selectRandomItem(T[] items) {
-    return items[RandomUtils.nextInt(items.length)];
+    return items[RandomUtils.nextInt(0, items.length)];
   }
   /** Selects a random item from the given items with weights*/
@@ -71,7 +71,7 @@
       totalWeight += pair.getSecond();
     }
-    int cutoff = RandomUtils.nextInt(totalWeight);
+    int cutoff = RandomUtils.nextInt(0, totalWeight);
     int cummulative = 0;
     T item = null;


@@ -18,7 +18,7 @@
 package org.apache.hadoop.hbase.chaos.policies;
-import org.apache.commons.lang.math.RandomUtils;
+import org.apache.commons.lang3.RandomUtils;
 import org.apache.hadoop.hbase.util.Threads;
 /** A policy which does stuff every time interval. */
@@ -32,7 +32,7 @@ public abstract class PeriodicPolicy extends Policy {
   @Override
   public void run() {
     // Add some jitter.
-    int jitter = RandomUtils.nextInt((int) periodMs);
+    int jitter = RandomUtils.nextInt(0, (int) periodMs);
     LOG.info("Sleeping for " + jitter + " to add jitter");
     Threads.sleep(jitter);


@@ -22,7 +22,7 @@ import java.util.Properties;
 import java.util.Set;
 import org.apache.commons.cli.CommandLine;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;


@@ -24,7 +24,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner;
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;
 import org.apache.commons.cli.CommandLine;
-import org.apache.commons.lang.RandomStringUtils;
+import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;


@@ -29,7 +29,7 @@ import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
-import org.apache.commons.lang.RandomStringUtils;
+import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.commons.math.stat.descriptive.DescriptiveStatistics;


@@ -26,7 +26,7 @@ import java.util.concurrent.ScheduledFuture;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.lang.math.RandomUtils;
+import org.apache.commons.lang3.RandomUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;


@@ -34,7 +34,7 @@ import java.util.UUID;
 import java.util.function.Function;
 import java.util.stream.Collectors;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;


@@ -26,7 +26,7 @@ import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.Set;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;


@@ -24,7 +24,7 @@ import java.util.ArrayList;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;


@@ -43,7 +43,7 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;


@@ -93,8 +93,8 @@
   <artifactId>commons-logging</artifactId>
 </dependency>
 <dependency>
-  <groupId>commons-lang</groupId>
-  <artifactId>commons-lang</artifactId>
+  <groupId>org.apache.commons</groupId>
+  <artifactId>commons-lang3</artifactId>
 </dependency>
 <dependency>
   <groupId>org.apache.hbase.thirdparty</groupId>


@@ -19,7 +19,7 @@
 package org.apache.hadoop.hbase.metrics;
-import org.apache.commons.lang.builder.HashCodeBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 /**


@@ -143,8 +143,8 @@
   <artifactId>commons-io</artifactId>
 </dependency>
 <dependency>
-  <groupId>commons-lang</groupId>
-  <artifactId>commons-lang</artifactId>
+  <groupId>org.apache.commons</groupId>
+  <artifactId>commons-lang3</artifactId>
 </dependency>
 <dependency>
   <groupId>commons-logging</groupId>


@@ -18,7 +18,7 @@
  */
 package org.apache.hadoop.hbase.replication;
-import org.apache.commons.lang.reflect.ConstructorUtils;
+import org.apache.commons.lang3.reflect.ConstructorUtils;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Abortable;


@@ -18,9 +18,10 @@
  */
 package org.apache.hadoop.hbase.replication;
-import org.apache.commons.lang.NotImplementedException;
+import org.apache.commons.lang3.NotImplementedException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Abortable;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
@@ -95,18 +96,18 @@ public class TableBasedReplicationQueuesClientImpl extends ReplicationTableBase
   @Override
   public int getHFileRefsNodeChangeVersion() throws KeeperException {
     // TODO
-    throw new NotImplementedException();
+    throw new NotImplementedException(HConstants.NOT_IMPLEMENTED);
   }
   @Override
   public List<String> getAllPeersFromHFileRefsQueue() throws KeeperException {
     // TODO
-    throw new NotImplementedException();
+    throw new NotImplementedException(HConstants.NOT_IMPLEMENTED);
   }
   @Override
   public List<String> getReplicableHFiles(String peerId) throws KeeperException {
     // TODO
-    throw new NotImplementedException();
+    throw new NotImplementedException(HConstants.NOT_IMPLEMENTED);
   }
 }


@@ -18,13 +18,14 @@
  */
 package org.apache.hadoop.hbase.replication;
-import org.apache.commons.lang.NotImplementedException;
+import org.apache.commons.lang3.NotImplementedException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Abortable;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Delete;
@@ -45,11 +46,8 @@ import org.apache.zookeeper.KeeperException;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.HashMap;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
 import java.util.SortedSet;
 import java.util.TreeSet;
@@ -298,26 +296,26 @@ public class TableBasedReplicationQueuesImpl extends ReplicationTableBase
   @Override
   public void addPeerToHFileRefs(String peerId) throws ReplicationException {
     // TODO
-    throw new NotImplementedException();
+    throw new NotImplementedException(HConstants.NOT_IMPLEMENTED);
   }
   @Override
   public void removePeerFromHFileRefs(String peerId) {
     // TODO
-    throw new NotImplementedException();
+    throw new NotImplementedException(HConstants.NOT_IMPLEMENTED);
   }
   @Override
   public void addHFileRefs(String peerId, List<Pair<Path, Path>> pairs)
       throws ReplicationException {
     // TODO
-    throw new NotImplementedException();
+    throw new NotImplementedException(HConstants.NOT_IMPLEMENTED);
   }
   @Override
   public void removeHFileRefs(String peerId, List<String> files) {
     // TODO
-    throw new NotImplementedException();
+    throw new NotImplementedException(HConstants.NOT_IMPLEMENTED);
   }
   private String buildQueueRowKey(String queueId) {


@@ -198,6 +198,13 @@ JRuby's licence.
 ## Skip racc, json-generator, json-parser because it is under Ruby license
 ## jruby bundles jquery, but we already ref that above.
 #end
+#macro(commons_lang3_notice)
+--
+## commons-lang3 has following note in the NOTICE file
+This product includes software from the Spring Framework,
+under the Apache License 2.0 (see: StringUtils.containsWhitespace())
+#end
 ## first bundled source
 #if(${bundled-logo})
@@ -255,6 +262,11 @@ JRuby's licence.
 #if(${dep.artifactId.equals("jruby-complete")})
 #set($jruby=true)
 #end
+#if(${dep.artifactId.equals("commons-lang3")})
+#if(${dep.groupId.equals("org.apache.commons")})
+#commons_lang3_notice()
+#end
+#end
 #if( ${dep.licenses.isEmpty()} )
 ERROR: This product includes ${dep.name} which has no licenses!
 Revert the change if invalid or if intentional add license info to supplemental-models.xml


@@ -268,8 +268,8 @@
       <artifactId>httpcore</artifactId>
     </dependency>
     <dependency>
-      <groupId>commons-lang</groupId>
-      <artifactId>commons-lang</artifactId>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-lang3</artifactId>
     </dependency>
     <dependency>
       <groupId>commons-logging</groupId>
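The coordinate swap above reflects the artifact's relocation: version 3 is published as org.apache.commons:commons-lang3 and its classes live under the org.apache.commons.lang3 package, so the 2.x and 3.x jars can coexist on one classpath during an incremental migration. A small sketch of an unchanged call site, assuming commons-lang3 is on the classpath:

import org.apache.commons.lang3.StringUtils;

public class StringUtilsSketch {
  public static void main(String[] args) {
    // Most utility signatures carried over from lang 2.x unchanged;
    // only the import (and the Maven coordinates) move.
    System.out.println(StringUtils.isNotBlank(" hbase "));            // true
    System.out.println(StringUtils.join(new String[] {"a", "b"}, ",")); // a,b
  }
}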


@@ -32,7 +32,7 @@ import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.cli.PosixParser;
-import org.apache.commons.lang.ArrayUtils;
+import org.apache.commons.lang3.ArrayUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;


@@ -36,7 +36,7 @@ import javax.ws.rs.core.Response;
 import javax.ws.rs.core.Response.ResponseBuilder;
 import javax.ws.rs.core.UriInfo;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;


@@ -31,7 +31,7 @@ import javax.ws.rs.QueryParam;
 import javax.ws.rs.core.Context;
 import javax.ws.rs.core.UriInfo;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.TableName;


@@ -28,9 +28,9 @@ import javax.xml.bind.annotation.XmlAttribute;
 import javax.xml.bind.annotation.XmlRootElement;
 import javax.xml.bind.annotation.XmlValue;
-import org.apache.commons.lang.builder.EqualsBuilder;
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import org.apache.commons.lang.builder.ToStringBuilder;
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+import org.apache.commons.lang3.builder.ToStringBuilder;
 import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.CellUtil;
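For the builder classes the migration is a pure package rename; EqualsBuilder, HashCodeBuilder, and ToStringBuilder keep the same fluent API in lang3. A minimal sketch against ValueSketch, a hypothetical value class that is not part of this diff:

import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;

public class ValueSketch {
  private final String column;
  private final long timestamp;

  ValueSketch(String column, long timestamp) {
    this.column = column;
    this.timestamp = timestamp;
  }

  @Override
  public boolean equals(Object obj) {
    if (!(obj instanceof ValueSketch)) {
      return false;
    }
    ValueSketch other = (ValueSketch) obj;
    // Same fluent chain as under org.apache.commons.lang.builder.
    return new EqualsBuilder().append(column, other.column)
        .append(timestamp, other.timestamp).isEquals();
  }

  @Override
  public int hashCode() {
    return new HashCodeBuilder().append(column).append(timestamp).toHashCode();
  }

  @Override
  public String toString() {
    return new ToStringBuilder(this).append("column", column)
        .append("timestamp", timestamp).toString();
  }
}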


@@ -30,9 +30,9 @@ import javax.xml.bind.annotation.XmlAttribute;
 import javax.xml.bind.annotation.XmlElement;
 import javax.xml.bind.annotation.XmlRootElement;
-import org.apache.commons.lang.builder.EqualsBuilder;
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import org.apache.commons.lang.builder.ToStringBuilder;
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+import org.apache.commons.lang3.builder.ToStringBuilder;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
 import org.codehaus.jackson.annotate.JsonProperty;


@@ -18,7 +18,7 @@
  */
 package org.apache.hadoop.hbase.rest;
-import org.apache.commons.lang.ArrayUtils;
+import org.apache.commons.lang3.ArrayUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;


@@ -19,9 +19,9 @@
 package org.apache.hadoop.hbase.rest.model;
-import org.apache.commons.lang.StringUtils;
-import org.apache.commons.lang.builder.ToStringBuilder;
-import org.apache.commons.lang.builder.ToStringStyle;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang3.builder.ToStringBuilder;
+import org.apache.commons.lang3.builder.ToStringStyle;
 import org.apache.hadoop.hbase.testclassification.RestTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;


@@ -23,9 +23,9 @@ import java.util.Iterator;
 import javax.xml.bind.JAXBContext;
-import org.apache.commons.lang.StringUtils;
-import org.apache.commons.lang.builder.ToStringBuilder;
-import org.apache.commons.lang.builder.ToStringStyle;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang3.builder.ToStringBuilder;
+import org.apache.commons.lang3.builder.ToStringStyle;
 import org.apache.hadoop.hbase.testclassification.RestTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;


@@ -148,8 +148,8 @@
       <artifactId>commons-io</artifactId>
     </dependency>
     <dependency>
-      <groupId>commons-lang</groupId>
-      <artifactId>commons-lang</artifactId>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-lang3</artifactId>
     </dependency>
     <dependency>
       <groupId>commons-logging</groupId>


@@ -28,7 +28,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.HRegionInfo;


@@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.master.balancer;
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;


@@ -503,8 +503,8 @@
       <artifactId>commons-io</artifactId>
     </dependency>
     <dependency>
-      <groupId>commons-lang</groupId>
-      <artifactId>commons-lang</artifactId>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-lang3</artifactId>
     </dependency>
     <dependency>
       <groupId>commons-logging</groupId>


@@ -22,7 +22,7 @@
 </%args>
 <%import>
 java.util.*;
-org.apache.commons.lang.time.FastDateFormat;
+org.apache.commons.lang3.time.FastDateFormat;
 org.apache.hadoop.hbase.regionserver.HRegionServer;
 org.apache.hadoop.hbase.util.Bytes;
 org.apache.hadoop.hbase.HRegionInfo;


@@ -26,8 +26,8 @@ import java.util.Map;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
-import org.apache.commons.lang.math.RandomUtils;
-import org.apache.commons.lang.mutable.MutableInt;
+import org.apache.commons.lang3.RandomUtils;
+import org.apache.commons.lang3.mutable.MutableInt;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -269,7 +269,7 @@ public class ZkSplitLogWorkerCoordination extends ZooKeeperListener implements
       // after a successful submit, sleep a little bit to allow other RSs to grab the rest tasks
       try {
-        int sleepTime = RandomUtils.nextInt(500) + 500;
+        int sleepTime = RandomUtils.nextInt(0, 500) + 500;
         Thread.sleep(sleepTime);
       } catch (InterruptedException e) {
         LOG.warn("Interrupted while yielding for other region servers", e);


@@ -31,7 +31,7 @@ import java.util.Map.Entry;
 import java.util.Random;
 import java.util.Set;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;


@@ -73,7 +73,7 @@ import javax.security.sasl.SaslClient;
 import javax.security.sasl.SaslException;
 import org.apache.commons.codec.binary.Base64;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;


@@ -45,7 +45,7 @@ import org.apache.commons.cli.OptionGroup;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
 import org.apache.commons.cli.PosixParser;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;


@@ -52,7 +52,7 @@ import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.commons.lang.mutable.MutableInt;
+import org.apache.commons.lang3.mutable.MutableInt;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;


@@ -23,7 +23,7 @@ import java.io.IOException;
 import java.util.List;
 import java.util.Set;
-import org.apache.commons.lang.ClassUtils;
+import org.apache.commons.lang3.ClassUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;


@@ -23,7 +23,7 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.Map.Entry;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;


@@ -36,7 +36,7 @@ import org.apache.commons.cli.GnuParser;
 import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;


@@ -22,7 +22,7 @@ import java.io.IOException;
 import java.io.InterruptedIOException;
 import java.util.NavigableSet;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -360,4 +360,4 @@ public class TableNamespaceManager {
     }
     return maxRegions;
   }
-}
\ No newline at end of file
+}


@@ -20,11 +20,9 @@ package org.apache.hadoop.hbase.master.balancer;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
-import java.util.Collections;
 import java.util.Comparator;
 import java.util.Deque;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
@@ -36,12 +34,13 @@ import java.util.TreeMap;
 import java.util.function.Predicate;
 import java.util.stream.Collectors;

-import org.apache.commons.lang.NotImplementedException;
+import org.apache.commons.lang3.NotImplementedException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.ClusterStatus;
 import org.apache.hadoop.hbase.HBaseIOException;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HDFSBlocksDistribution;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.ServerLoad;
@@ -53,15 +52,12 @@ import org.apache.hadoop.hbase.master.MasterServices;
 import org.apache.hadoop.hbase.master.RackManager;
 import org.apache.hadoop.hbase.master.RegionPlan;
 import org.apache.hadoop.hbase.master.balancer.BaseLoadBalancer.Cluster.Action.Type;
-import org.apache.hadoop.hbase.security.access.AccessControlLists;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.hbase.shaded.com.google.common.base.Joiner;
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.ArrayListMultimap;
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.Sets;

-import org.apache.zookeeper.KeeperException;

 /**
  * The base class for load balancers. It provides the the functions used to by
@@ -634,7 +630,7 @@ public abstract class BaseLoadBalancer implements LoadBalancer {
     public Action undoAction() {
       // TODO implement this. This action is not being used by the StochasticLB for now
       // in case it uses it, we should implement this function.
-      throw new NotImplementedException();
+      throw new NotImplementedException(HConstants.NOT_IMPLEMENTED);
     }
     @Override
     public String toString() {


@@ -27,7 +27,7 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.concurrent.ConcurrentHashMap;
-import org.apache.commons.lang.builder.HashCodeBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.DoNotRetryIOException;


@@ -31,7 +31,7 @@ import java.util.concurrent.TimeUnit;
 import java.util.function.Predicate;
 import java.util.stream.Collectors;
-import org.apache.commons.lang.builder.HashCodeBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;


@@ -18,6 +18,7 @@
  */
 package org.apache.hadoop.hbase.regionserver;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
 import java.util.Collection;
 import java.util.Comparator;
@@ -61,7 +62,7 @@ public class CellSet implements NavigableSet<Cell> {
   }

   public Cell ceiling(Cell e) {
-    throw new UnsupportedOperationException("Not implemented");
+    throw new UnsupportedOperationException(HConstants.NOT_IMPLEMENTED);
   }

   public Iterator<Cell> descendingIterator() {
@@ -69,11 +70,11 @@ public class CellSet implements NavigableSet<Cell> {
   }

   public NavigableSet<Cell> descendingSet() {
-    throw new UnsupportedOperationException("Not implemented");
+    throw new UnsupportedOperationException(HConstants.NOT_IMPLEMENTED);
   }

   public Cell floor(Cell e) {
-    throw new UnsupportedOperationException("Not implemented");
+    throw new UnsupportedOperationException(HConstants.NOT_IMPLEMENTED);
   }

   public SortedSet<Cell> headSet(final Cell toElement) {
@@ -86,7 +87,7 @@ public class CellSet implements NavigableSet<Cell> {
   }

   public Cell higher(Cell e) {
-    throw new UnsupportedOperationException("Not implemented");
+    throw new UnsupportedOperationException(HConstants.NOT_IMPLEMENTED);
   }

   public Iterator<Cell> iterator() {
@@ -94,24 +95,24 @@ public class CellSet implements NavigableSet<Cell> {
   }

   public Cell lower(Cell e) {
-    throw new UnsupportedOperationException("Not implemented");
+    throw new UnsupportedOperationException(HConstants.NOT_IMPLEMENTED);
   }

   public Cell pollFirst() {
-    throw new UnsupportedOperationException("Not implemented");
+    throw new UnsupportedOperationException(HConstants.NOT_IMPLEMENTED);
   }

   public Cell pollLast() {
-    throw new UnsupportedOperationException("Not implemented");
+    throw new UnsupportedOperationException(HConstants.NOT_IMPLEMENTED);
   }

   public SortedSet<Cell> subSet(Cell fromElement, Cell toElement) {
-    throw new UnsupportedOperationException("Not implemented");
+    throw new UnsupportedOperationException(HConstants.NOT_IMPLEMENTED);
   }

   public NavigableSet<Cell> subSet(Cell fromElement,
       boolean fromInclusive, Cell toElement, boolean toInclusive) {
-    throw new UnsupportedOperationException("Not implemented");
+    throw new UnsupportedOperationException(HConstants.NOT_IMPLEMENTED);
   }

   public SortedSet<Cell> tailSet(Cell fromElement) {
@@ -123,7 +124,7 @@ public class CellSet implements NavigableSet<Cell> {
   }

   public Comparator<? super Cell> comparator() {
-    throw new UnsupportedOperationException("Not implemented");
+    throw new UnsupportedOperationException(HConstants.NOT_IMPLEMENTED);
   }

   public Cell first() {
@@ -139,7 +140,7 @@ public class CellSet implements NavigableSet<Cell> {
   }

   public boolean addAll(Collection<? extends Cell> c) {
-    throw new UnsupportedOperationException("Not implemented");
+    throw new UnsupportedOperationException(HConstants.NOT_IMPLEMENTED);
   }

   public void clear() {
@@ -152,7 +153,7 @@ public class CellSet implements NavigableSet<Cell> {
   }

   public boolean containsAll(Collection<?> c) {
-    throw new UnsupportedOperationException("Not implemented");
+    throw new UnsupportedOperationException(HConstants.NOT_IMPLEMENTED);
   }

   public boolean isEmpty() {
@@ -164,11 +165,11 @@ public class CellSet implements NavigableSet<Cell> {
   }

   public boolean removeAll(Collection<?> c) {
-    throw new UnsupportedOperationException("Not implemented");
+    throw new UnsupportedOperationException(HConstants.NOT_IMPLEMENTED);
   }

   public boolean retainAll(Collection<?> c) {
-    throw new UnsupportedOperationException("Not implemented");
+    throw new UnsupportedOperationException(HConstants.NOT_IMPLEMENTED);
   }

   public Cell get(Cell kv) {
@@ -180,10 +181,10 @@ public class CellSet implements NavigableSet<Cell> {
   }

   public Object[] toArray() {
-    throw new UnsupportedOperationException("Not implemented");
+    throw new UnsupportedOperationException(HConstants.NOT_IMPLEMENTED);
   }

   public <T> T[] toArray(T[] a) {
-    throw new UnsupportedOperationException("Not implemented");
+    throw new UnsupportedOperationException(HConstants.NOT_IMPLEMENTED);
   }
 }
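CellSet's unsupported NavigableSet operations now reuse the same message constant as the NotImplementedException sites earlier in the diff, so the literal "Not implemented" lives in one place. The definition itself is in HConstants.java, one of the files omitted from this view; the sketch below shows only the assumed shape of that constant:

// Hedged sketch: the real definition is in HConstants.java, which this
// truncated diff does not show; the value is inferred from the messages
// these call sites used before the change.
public final class HConstantsSketch {
  public static final String NOT_IMPLEMENTED = "Not implemented";

  private HConstantsSketch() {
    // Constants holder; never instantiated.
  }
}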


@@ -56,8 +56,8 @@ import javax.management.MalformedObjectNameException;
 import javax.management.ObjectName;
 import javax.servlet.http.HttpServlet;
-import org.apache.commons.lang.SystemUtils;
-import org.apache.commons.lang.math.RandomUtils;
+import org.apache.commons.lang3.SystemUtils;
+import org.apache.commons.lang3.RandomUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -1824,7 +1824,7 @@ public class HRegionServer extends HasThread implements
         if (((HRegion)r).shouldFlush(whyFlush)) {
           FlushRequester requester = server.getFlushRequester();
           if (requester != null) {
-            long randomDelay = RandomUtils.nextInt(RANGE_OF_DELAY) + MIN_DELAY_TIME;
+            long randomDelay = RandomUtils.nextInt(0, RANGE_OF_DELAY) + MIN_DELAY_TIME;
             LOG.info(getName() + " requesting flush of " +
               r.getRegionInfo().getRegionNameAsString() + " because " +
               whyFlush.toString() +
@@ -3510,7 +3510,7 @@ public class HRegionServer extends HasThread implements
   private boolean isHealthCheckerConfigured() {
     String healthScriptLocation = this.conf.get(HConstants.HEALTH_SCRIPT_LOC);
-    return org.apache.commons.lang.StringUtils.isNotBlank(healthScriptLocation);
+    return org.apache.commons.lang3.StringUtils.isNotBlank(healthScriptLocation);
   }

   /**
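The last hunk leaves the lang3 call fully qualified instead of imported, presumably because HRegionServer already imports Hadoop's own StringUtils (an assumption based on the usual reason for this pattern). A sketch:

public class HealthCheckSketch {
  // Fully qualifying org.apache.commons.lang3.StringUtils sidesteps a name
  // clash when another StringUtils (e.g. org.apache.hadoop.util.StringUtils)
  // is already imported in the same file.
  static boolean isHealthCheckerConfigured(String healthScriptLocation) {
    return org.apache.commons.lang3.StringUtils.isNotBlank(healthScriptLocation);
  }

  public static void main(String[] args) {
    System.out.println(isHealthCheckerConfigured("/usr/local/bin/health.sh"));
  }
}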


@@ -23,7 +23,7 @@ import java.util.List;
 import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.TimeUnit;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.CompatibilitySingletonFactory;

@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.regionserver;
 import java.io.IOException;
-import org.apache.commons.lang.NotImplementedException;
+import org.apache.commons.lang3.NotImplementedException;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;


@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.regionserver;
 import java.io.IOException;
-import org.apache.commons.lang.NotImplementedException;
+import org.apache.commons.lang3.NotImplementedException;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.Cell;

@@ -43,7 +43,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.LongAdder;
-import org.apache.commons.lang.mutable.MutableObject;
+import org.apache.commons.lang3.mutable.MutableObject;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
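The mutable wrappers (MutableInt above, MutableObject here) are likewise a package-only move from org.apache.commons.lang.mutable, with one useful wrinkle: lang3's MutableObject is generic, so migrated call sites can drop casts. A short sketch:

import org.apache.commons.lang3.mutable.MutableInt;
import org.apache.commons.lang3.mutable.MutableObject;

public class MutableSketch {
  public static void main(String[] args) {
    MutableInt counter = new MutableInt(0);
    counter.increment();

    // lang 2.x MutableObject held a raw Object; lang3 is parameterized.
    MutableObject<String> holder = new MutableObject<>("initial");
    holder.setValue("updated");

    System.out.println(counter.intValue() + " " + holder.getValue());
  }
}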

Some files were not shown because too many files have changed in this diff.