diff --git a/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/tools/StabilityOptions.java b/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/tools/StabilityOptions.java
index b79f64531c4..4e2402f48f8 100644
--- a/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/tools/StabilityOptions.java
+++ b/hbase-annotations/src/main/java/org/apache/hadoop/hbase/classification/tools/StabilityOptions.java
@@ -21,6 +21,7 @@ import com.sun.javadoc.DocErrorReporter;
 
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 
 class StabilityOptions {
   public static final String STABLE_OPTION = "-stable";
@@ -28,7 +29,7 @@ class StabilityOptions {
   public static final String UNSTABLE_OPTION = "-unstable";
 
   public static Integer optionLength(String option) {
-    String opt = option.toLowerCase();
+    String opt = option.toLowerCase(Locale.ROOT);
     if (opt.equals(UNSTABLE_OPTION)) return 1;
     if (opt.equals(EVOLVING_OPTION)) return 1;
     if (opt.equals(STABLE_OPTION)) return 1;
@@ -37,7 +38,7 @@ class StabilityOptions {
 
   public static void validOptions(String[][] options, DocErrorReporter reporter) {
     for (int i = 0; i < options.length; i++) {
-      String opt = options[i][0].toLowerCase();
+      String opt = options[i][0].toLowerCase(Locale.ROOT);
       if (opt.equals(UNSTABLE_OPTION)) {
         RootDocProcessor.stability = UNSTABLE_OPTION;
       } else if (opt.equals(EVOLVING_OPTION)) {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
index 6912ab51431..5891a5e6899 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
@@ -24,6 +24,7 @@ import java.io.IOException;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 
@@ -477,11 +478,11 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
     setBlockCacheEnabled(blockCacheEnabled);
     setTimeToLive(timeToLive);
     setCompressionType(Compression.Algorithm.
-        valueOf(compression.toUpperCase()));
+        valueOf(compression.toUpperCase(Locale.ROOT)));
     setDataBlockEncoding(DataBlockEncoding.
-        valueOf(dataBlockEncoding.toUpperCase()));
+        valueOf(dataBlockEncoding.toUpperCase(Locale.ROOT)));
     setBloomFilterType(BloomType.
-        valueOf(bloomFilter.toUpperCase()));
+        valueOf(bloomFilter.toUpperCase(Locale.ROOT)));
     setBlocksize(blocksize);
     setScope(scope);
   }
@@ -603,7 +604,7 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
     if (n == null) {
       return Compression.Algorithm.NONE;
     }
-    return Compression.Algorithm.valueOf(n.toUpperCase());
+    return Compression.Algorithm.valueOf(n.toUpperCase(Locale.ROOT));
   }
 
   /** @return compression type being used for the column family for major
@@ -613,7 +614,7 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
     if (n == null) {
       return getCompression();
     }
-    return Compression.Algorithm.valueOf(n.toUpperCase());
+    return Compression.Algorithm.valueOf(n.toUpperCase(Locale.ROOT));
   }
 
   /** @return maximum number of versions */
@@ -708,7 +709,7 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
    * @return this (for chained invocation)
    */
   public HColumnDescriptor setCompressionType(Compression.Algorithm type) {
-    return setValue(COMPRESSION, type.getName().toUpperCase());
+    return setValue(COMPRESSION, type.getName().toUpperCase(Locale.ROOT));
   }
 
   /**
@@ -820,7 +821,7 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
    */
   public HColumnDescriptor setCompactionCompressionType(
       Compression.Algorithm type) {
-    return setValue(COMPRESSION_COMPACT, type.getName().toUpperCase());
+    return setValue(COMPRESSION_COMPACT, type.getName().toUpperCase(Locale.ROOT));
   }
 
   /**
@@ -847,7 +848,7 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
     String value = getValue(KEEP_DELETED_CELLS);
     if (value != null) {
       // toUpperCase for backwards compatibility
-      return KeepDeletedCells.valueOf(value.toUpperCase());
+      return KeepDeletedCells.valueOf(value.toUpperCase(Locale.ROOT));
     }
     return DEFAULT_KEEP_DELETED;
   }
@@ -937,7 +938,7 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
     if (n == null) {
       n = DEFAULT_BLOOMFILTER;
     }
-    return BloomType.valueOf(n.toUpperCase());
+    return BloomType.valueOf(n.toUpperCase(Locale.ROOT));
   }
 
   /**
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerName.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerName.java
index 5ccdd35d0e3..4dc5a5a59e8 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerName.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ServerName.java
@@ -32,6 +32,7 @@ import org.apache.hadoop.hbase.util.Bytes;
 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 import java.util.regex.Pattern;
 
 /**
@@ -229,7 +230,7 @@ public class ServerName implements Comparable<ServerName>, Serializable {
    */
   static String getServerName(String hostName, int port, long startcode) {
     final StringBuilder name = new StringBuilder(hostName.length() + 1 + 5 + 1 + 13);
-    name.append(hostName.toLowerCase());
+    name.append(hostName.toLowerCase(Locale.ROOT));
     name.append(SERVERNAME_SEPARATOR);
     name.append(port);
     name.append(SERVERNAME_SEPARATOR);
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java
index 5eb3703603b..1f0043c0c59 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.filter;
 
 import com.google.protobuf.InvalidProtocolBufferException;
 
+import java.util.Locale;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -52,8 +53,8 @@ public class SubstringComparator extends ByteArrayComparable {
    * @param substr the substring
    */
   public SubstringComparator(String substr) {
-    super(Bytes.toBytes(substr.toLowerCase()));
-    this.substr = substr.toLowerCase();
+    super(Bytes.toBytes(substr.toLowerCase(Locale.ROOT)));
+    this.substr = substr.toLowerCase(Locale.ROOT);
   }
 
   @Override
@@ -63,7 +64,7 @@ public class SubstringComparator extends ByteArrayComparable {
 
   @Override
   public int compareTo(byte[] value, int offset, int length) {
-    return Bytes.toString(value, offset, length).toLowerCase().contains(substr) ? 0
+    return Bytes.toString(value, offset, length).toLowerCase(Locale.ROOT).contains(substr) ? 0
         : 1;
   }
 
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcChannel.java
index 9ab17f5653f..e24050bd368 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcChannel.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AsyncRpcChannel.java
@@ -27,6 +27,7 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Random;
 import java.util.concurrent.TimeUnit;
@@ -250,7 +251,7 @@ public class AsyncRpcChannel {
     return new SaslClientHandler(realTicket, authMethod, token, serverPrincipal,
         client.fallbackAllowed,
         client.conf.get("hbase.rpc.protection",
-            SaslUtil.QualityOfProtection.AUTHENTICATION.name().toLowerCase()),
+            SaslUtil.QualityOfProtection.AUTHENTICATION.name().toLowerCase(Locale.ROOT)),
         new SaslClientHandler.SaslExceptionHandler() {
           @Override
           public void handle(int retryCount, Random random, Throwable cause) {
@@ -458,7 +459,7 @@ public class AsyncRpcChannel {
       throw new IOException("Can't obtain server Kerberos config key from SecurityInfo");
     }
     this.serverPrincipal = SecurityUtil.getServerPrincipal(client.conf.get(serverKey),
-        address.getAddress().getCanonicalHostName().toLowerCase());
+        address.getAddress().getCanonicalHostName().toLowerCase(Locale.ROOT));
     if (LOG.isDebugEnabled()) {
       LOG.debug("RPC Server Kerberos principal name for service=" + serviceName + " is "
           + serverPrincipal);
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientImpl.java
index 6f68735c9ec..a0c9dd3892a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientImpl.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClientImpl.java
@@ -38,6 +38,7 @@ import java.security.PrivilegedExceptionAction;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Random;
@@ -332,7 +333,7 @@ public class RpcClientImpl extends AbstractRpcClient {
             "Can't obtain server Kerberos config key from SecurityInfo");
         }
         serverPrincipal = SecurityUtil.getServerPrincipal(
-            conf.get(serverKey), server.getAddress().getCanonicalHostName().toLowerCase());
+            conf.get(serverKey), server.getAddress().getCanonicalHostName().toLowerCase(Locale.ROOT));
         if (LOG.isDebugEnabled()) {
           LOG.debug("RPC Server Kerberos principal name for service="
               + remoteId.getServiceName() + " is " + serverPrincipal);
@@ -613,7 +614,7 @@ public class RpcClientImpl extends AbstractRpcClient {
         final OutputStream out2) throws IOException {
       saslRpcClient = new HBaseSaslRpcClient(authMethod, token, serverPrincipal, fallbackAllowed,
           conf.get("hbase.rpc.protection",
-              QualityOfProtection.AUTHENTICATION.name().toLowerCase()));
+              QualityOfProtection.AUTHENTICATION.name().toLowerCase(Locale.ROOT)));
       return saslRpcClient.saslConnect(in2, out2);
     }
 
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
index 8033f7c3147..cfc40886820 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.security;
 import org.apache.commons.codec.binary.Base64;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 
+import java.util.Locale;
 import java.util.Map;
 import java.util.TreeMap;
 
@@ -79,13 +80,13 @@ public class SaslUtil {
    */
   public static QualityOfProtection getQop(String stringQop) {
     QualityOfProtection qop = null;
-    if (QualityOfProtection.AUTHENTICATION.name().toLowerCase().equals(stringQop)
+    if (QualityOfProtection.AUTHENTICATION.name().toLowerCase(Locale.ROOT).equals(stringQop)
         || QualityOfProtection.AUTHENTICATION.saslQop.equals(stringQop)) {
       qop = QualityOfProtection.AUTHENTICATION;
-    } else if (QualityOfProtection.INTEGRITY.name().toLowerCase().equals(stringQop)
+    } else if (QualityOfProtection.INTEGRITY.name().toLowerCase(Locale.ROOT).equals(stringQop)
         || QualityOfProtection.INTEGRITY.saslQop.equals(stringQop)) {
       qop = QualityOfProtection.INTEGRITY;
-    } else if (QualityOfProtection.PRIVACY.name().toLowerCase().equals(stringQop)
+    } else if (QualityOfProtection.PRIVACY.name().toLowerCase(Locale.ROOT).equals(stringQop)
         || QualityOfProtection.PRIVACY.saslQop.equals(stringQop)) {
       qop = QualityOfProtection.PRIVACY;
     }
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java
index 9d0319b7d21..b683fcc9e4e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/util/PoolMap.java
@@ -23,6 +23,7 @@ import java.util.Collection;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
@@ -254,7 +255,7 @@ public class PoolMap<K, V> implements Map<K, V> {
     }
 
     public static String fuzzyNormalize(String name) {
-      return name != null ? name.replaceAll("-", "").trim().toLowerCase() : "";
+      return name != null ? name.replaceAll("-", "").trim().toLowerCase(Locale.ROOT) : "";
     }
 
     public static PoolType fuzzyMatch(String name) {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java
index be5bf6e5b4e..40b84cfbb50 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/MetaTableLocator.java
@@ -26,6 +26,7 @@ import java.net.SocketTimeoutException;
 import java.net.UnknownHostException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -409,7 +410,7 @@ public class MetaTableLocator {
       } else if (cause != null && cause instanceof EOFException) {
         // Catch. Other end disconnected us.
       } else if (cause != null && cause.getMessage() != null &&
-          cause.getMessage().toLowerCase().contains("connection reset")) {
+          cause.getMessage().toLowerCase(Locale.ROOT).contains("connection reset")) {
         // Catch. Connection reset.
       } else {
         throw ioe;
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/KeyStoreKeyProvider.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/KeyStoreKeyProvider.java
index 0cc4bd2d9f8..95856d84b7d 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/KeyStoreKeyProvider.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/crypto/KeyStoreKeyProvider.java
@@ -30,6 +30,7 @@ import java.security.KeyStoreException;
 import java.security.NoSuchAlgorithmException;
 import java.security.UnrecoverableKeyException;
 import java.security.cert.CertificateException;
+import java.util.Locale;
 import java.util.Properties;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -142,7 +143,7 @@ public class KeyStoreKeyProvider implements KeyProvider {
         throw new RuntimeException("KeyProvider scheme should specify KeyStore type");
       }
       // KeyStore expects instance type specifications in uppercase
-      store = KeyStore.getInstance(storeType.toUpperCase());
+      store = KeyStore.getInstance(storeType.toUpperCase(Locale.ROOT));
       processParameters(uri);
       load(uri);
     } catch (URISyntaxException e) {
diff --git a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceFactoryImpl.java b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceFactoryImpl.java
index 4098e263862..76bbb099b2e 100644
--- a/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceFactoryImpl.java
+++ b/hbase-hadoop2-compat/src/main/java/org/apache/hadoop/hbase/ipc/MetricsHBaseServerSourceFactoryImpl.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.ipc;
 
 import java.util.HashMap;
+import java.util.Locale;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 
@@ -48,7 +49,7 @@ public class MetricsHBaseServerSourceFactoryImpl extends MetricsHBaseServerSourceFactory {
       source = new MetricsHBaseServerSourceImpl(
           context,
           METRICS_DESCRIPTION,
-          context.toLowerCase(),
+          context.toLowerCase(Locale.ROOT),
           context + METRICS_JMX_CONTEXT_SUFFIX, wrap);
 
       //Store back in storage
diff --git a/hbase-hadoop2-compat/src/test/java/org/apache/hadoop/hbase/test/MetricsAssertHelperImpl.java b/hbase-hadoop2-compat/src/test/java/org/apache/hadoop/hbase/test/MetricsAssertHelperImpl.java
index e19e391b529..aaac4985376 100644
--- a/hbase-hadoop2-compat/src/test/java/org/apache/hadoop/hbase/test/MetricsAssertHelperImpl.java
+++ b/hbase-hadoop2-compat/src/test/java/org/apache/hadoop/hbase/test/MetricsAssertHelperImpl.java
@@ -29,6 +29,7 @@ import org.apache.hadoop.metrics2.MetricsTag;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 
 import java.util.HashMap;
+import java.util.Locale;
 import java.util.Map;
 
 import static org.junit.Assert.*;
@@ -246,6 +247,6 @@ public class MetricsAssertHelperImpl implements MetricsAssertHelper {
   }
 
   private String canonicalizeMetricName(String in) {
-    return in.toLowerCase().replaceAll("[^A-Za-z0-9 ]", "");
+    return in.toLowerCase(Locale.ROOT).replaceAll("[^A-Za-z0-9 ]", "");
   }
 }
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/HBaseClusterManager.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/HBaseClusterManager.java
index ba6a4a9a70e..b0281fb13ed 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/HBaseClusterManager.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/HBaseClusterManager.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase;
 
 import java.io.File;
 import java.io.IOException;
+import java.util.Locale;
 import java.util.Map;
 
 import org.apache.commons.lang.StringUtils;
@@ -205,7 +206,7 @@ public class HBaseClusterManager extends Configured implements ClusterManager {
     @Override
     public String getCommand(ServiceType service, Operation op) {
       return String.format("%s/bin/hbase-daemon.sh %s %s %s", hbaseHome, confDir,
-          op.toString().toLowerCase(), service);
+          op.toString().toLowerCase(Locale.ROOT), service);
     }
   }
 
@@ -235,7 +236,7 @@ public class HBaseClusterManager extends Configured implements ClusterManager {
     @Override
     public String getCommand(ServiceType service, Operation op) {
       return String.format("%s/sbin/hadoop-daemon.sh %s %s %s", hadoopHome, confDir,
-          op.toString().toLowerCase(), service);
+          op.toString().toLowerCase(Locale.ROOT), service);
     }
   }
 
@@ -264,7 +265,7 @@ public class HBaseClusterManager extends Configured implements ClusterManager {
 
     @Override
     public String getCommand(ServiceType service, Operation op) {
-      return String.format("%s/bin/zkServer.sh %s", zookeeperHome, op.toString().toLowerCase());
+      return String.format("%s/bin/zkServer.sh %s", zookeeperHome, op.toString().toLowerCase(Locale.ROOT));
     }
 
     @Override
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/RESTApiClusterManager.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/RESTApiClusterManager.java
index 717de1764be..04a3b05fe22 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/RESTApiClusterManager.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/RESTApiClusterManager.java
@@ -37,6 +37,7 @@ import javax.xml.ws.http.HTTPException;
 import java.io.IOException;
 import java.net.URI;
 import java.util.HashMap;
+import java.util.Locale;
 import java.util.Map;
 
 /**
@@ -274,8 +275,8 @@ public class RESTApiClusterManager extends Configured implements ClusterManager
         if (role.get("hostRef").get("hostId").getTextValue().equals(hostId) &&
             role.get("type")
                 .getTextValue()
-                .toLowerCase()
-                .equals(roleType.toLowerCase())) {
+                .toLowerCase(Locale.ROOT)
+                .equals(roleType.toLowerCase(Locale.ROOT))) {
           roleValue = role.get(property).getTextValue();
           break;
         }
@@ -328,7 +329,7 @@ public class RESTApiClusterManager extends Configured implements ClusterManager
     // APIs tend to take commands in lowercase, so convert them to save the trouble later.
     @Override
     public String toString() {
-      return name().toLowerCase();
+      return name().toLowerCase(Locale.ROOT);
     }
   }
 
@@ -348,4 +349,4 @@ public class RESTApiClusterManager extends Configured implements ClusterManager
   private enum Service {
     HBASE, HDFS, MAPREDUCE
   }
-}
\ No newline at end of file
+}
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/StripeCompactionsPerformanceEvaluation.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/StripeCompactionsPerformanceEvaluation.java
index 848017fbfc7..a6b502fb647 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/StripeCompactionsPerformanceEvaluation.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/StripeCompactionsPerformanceEvaluation.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase;
 
 import java.io.IOException;
+import java.util.Locale;
 import java.util.Set;
 
 import org.apache.commons.cli.CommandLine;
@@ -111,7 +112,7 @@ public class StripeCompactionsPerformanceEvaluation extends AbstractHBaseTool {
     } else {
       minValueSize = maxValueSize = Integer.parseInt(valueSize);
     }
-    String datagen = cmd.getOptionValue(DATAGEN_KEY, "default").toLowerCase();
+    String datagen = cmd.getOptionValue(DATAGEN_KEY, "default").toLowerCase(Locale.ROOT);
     if ("default".equals(datagen)) {
       dataGen = new MultiThreadedAction.DefaultDataGenerator(
           minValueSize, maxValueSize, 1, 1, new byte[][] { COLUMN_FAMILY });
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/GzipFilter.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/GzipFilter.java
index 4995b860735..094ae0b3d58 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/GzipFilter.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/GzipFilter.java
@@ -22,6 +22,7 @@ package org.apache.hadoop.hbase.rest.filter;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.util.HashSet;
+import java.util.Locale;
 import java.util.Set;
 import java.util.StringTokenizer;
 
@@ -65,11 +66,11 @@ public class GzipFilter implements Filter {
     String acceptEncoding = request.getHeader("accept-encoding");
     String contentType = request.getHeader("content-type");
     if ((contentEncoding != null) &&
-        (contentEncoding.toLowerCase().indexOf("gzip") > -1)) {
+        (contentEncoding.toLowerCase(Locale.ROOT).indexOf("gzip") > -1)) {
       request = new GZIPRequestWrapper(request);
     }
     if (((acceptEncoding != null) &&
-        (acceptEncoding.toLowerCase().indexOf("gzip") > -1)) ||
+        (acceptEncoding.toLowerCase(Locale.ROOT).indexOf("gzip") > -1)) ||
         ((contentType != null) && mimeTypes.contains(contentType))) {
       response = new GZIPResponseWrapper(response);
     }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcExecutor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcExecutor.java
index 40c11aa65a1..880df368ce5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcExecutor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcExecutor.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.ipc;
 
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.ThreadLocalRandom;
 import java.util.concurrent.atomic.AtomicInteger;
@@ -221,7 +222,7 @@ public abstract class RpcExecutor {
    */
  public void resizeQueues(Configuration conf) {
     String configKey = RpcScheduler.IPC_SERVER_MAX_CALLQUEUE_LENGTH;
-    if (name != null && name.toLowerCase().contains("priority")) {
+    if (name != null && name.toLowerCase(Locale.ROOT).contains("priority")) {
       configKey = RpcScheduler.IPC_SERVER_PRIORITY_MAX_CALLQUEUE_LENGTH;
     }
     currentQueueLimit = conf.getInt(configKey, currentQueueLimit);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
index c3b8d157837..0a5d93708e4 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/Import.java
@@ -28,6 +28,7 @@ import java.lang.reflect.Method;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.TreeMap;
 import java.util.UUID;
@@ -391,7 +392,7 @@ public class Import {
       filter = instantiateFilter(conf);
       String durabilityStr = conf.get(WAL_DURABILITY);
       if(durabilityStr != null){
-        durability = Durability.valueOf(durabilityStr.toUpperCase());
+        durability = Durability.valueOf(durabilityStr.toUpperCase(Locale.ROOT));
       }
       // TODO: This is kind of ugly doing setup of ZKW just to read the clusterid.
       ZooKeeperWatcher zkw = null;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java
index be20d9015f3..7ad68eafe7e 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableInputFormat.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.mapreduce;
 import java.io.IOException;
 import java.util.Collections;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -253,7 +254,7 @@ implements Configurable {
   @Override
   public List<InputSplit> getSplits(JobContext context) throws IOException {
     List<InputSplit> splits = super.getSplits(context);
-    if ((conf.get(SHUFFLE_MAPS) != null) && "true".equals(conf.get(SHUFFLE_MAPS).toLowerCase())) {
+    if ((conf.get(SHUFFLE_MAPS) != null) && "true".equals(conf.get(SHUFFLE_MAPS).toLowerCase(Locale.ROOT))) {
       Collections.shuffle(splits);
     }
     return splits;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/BaseRowProcessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/BaseRowProcessor.java
index 65375b8c810..be2bd91041b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/BaseRowProcessor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/BaseRowProcessor.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.regionserver;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 import java.util.UUID;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -61,7 +62,7 @@ implements RowProcessor<S,T> {
 
   @Override
   public String getName() {
-    return this.getClass().getSimpleName().toLowerCase();
+    return this.getClass().getSimpleName().toLowerCase(Locale.ROOT);
   }
 
   @Override
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 3de88382d02..2e2ecfea433 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -34,6 +34,7 @@ import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.NavigableMap;
@@ -8433,7 +8434,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Region {
     }
     boolean majorCompact = false;
     if (args.length > 1) {
-      if (!args[1].toLowerCase().startsWith("major")) {
+      if (!args[1].toLowerCase(Locale.ROOT).startsWith("major")) {
         printUsageAndExit("ERROR: Unrecognized option <" + args[1] + ">");
       }
       majorCompact = true;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.java
index b9e56d933a5..24ee5e3feba 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/HBaseSaslRpcServer.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.security;
 import java.io.ByteArrayInputStream;
 import java.io.DataInputStream;
 import java.io.IOException;
+import java.util.Locale;
 
 import javax.security.auth.callback.Callback;
 import javax.security.auth.callback.CallbackHandler;
@@ -50,7 +51,7 @@ public class HBaseSaslRpcServer {
 
   public static void init(Configuration conf) {
     SaslUtil.initSaslProperties(conf.get("hbase.rpc.protection",
-        QualityOfProtection.AUTHENTICATION.name().toLowerCase()));
+        QualityOfProtection.AUTHENTICATION.name().toLowerCase(Locale.ROOT)));
   }
 
   public static <T extends TokenIdentifier> T getIdentifier(String id,
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/CreateSnapshot.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/CreateSnapshot.java
index 06b601735da..10d1df89832 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/CreateSnapshot.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/CreateSnapshot.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
 
 import java.util.Arrays;
+import java.util.Locale;
 
 /**
@@ -66,7 +67,7 @@ public class CreateSnapshot extends AbstractHBaseTool {
       admin = connection.getAdmin();
       HBaseProtos.SnapshotDescription.Type type = HBaseProtos.SnapshotDescription.Type.FLUSH;
       if (snapshotType != null) {
-        type = HBaseProtos.SnapshotDescription.Type.valueOf(snapshotName.toUpperCase());
+        type = HBaseProtos.SnapshotDescription.Type.valueOf(snapshotName.toUpperCase(Locale.ROOT));
       }
 
       admin.snapshot(snapshotName, TableName.valueOf(tableName), type);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java
index e131bd35528..d0e2baf35e5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.util;
 
 import java.io.IOException;
+import java.util.Locale;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
@@ -53,7 +54,7 @@ public class CompressionTest {
   private static final Log LOG = LogFactory.getLog(CompressionTest.class);
 
   public static boolean testCompression(String codec) {
-    codec = codec.toLowerCase();
+    codec = codec.toLowerCase(Locale.ROOT);
 
     Compression.Algorithm a;
 
@@ -109,7 +110,7 @@ public class CompressionTest {
     System.err.println(
       "Usage: CompressionTest <path> " +
-      StringUtils.join( Compression.Algorithm.values(), "|").toLowerCase() +
+      StringUtils.join( Compression.Algorithm.values(), "|").toLowerCase(Locale.ROOT) +
       "\n" +
       "For example:\n" +
      "  hbase " + CompressionTest.class + " file:///tmp/testfile gz\n");
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/DirectMemoryUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/DirectMemoryUtils.java
index c1947a2f4ac..9013bab8b55 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/DirectMemoryUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/DirectMemoryUtils.java
@@ -25,6 +25,7 @@ import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.nio.ByteBuffer;
 import java.util.List;
+import java.util.Locale;
 
 import javax.management.JMException;
 import javax.management.MBeanServer;
@@ -87,7 +88,7 @@ public class DirectMemoryUtils {
     long multiplier = 1; //for the byte case.
     for (String s : arguments) {
       if (s.contains("-XX:MaxDirectMemorySize=")) {
-        String memSize = s.toLowerCase()
+        String memSize = s.toLowerCase(Locale.ROOT)
             .replace("-xx:maxdirectmemorysize=", "").trim();
 
         if (memSize.contains("k")) {
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
index 87c50d9b34e..06b8e9169b7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
@@ -35,6 +35,7 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.concurrent.ArrayBlockingQueue;
 import java.util.concurrent.ConcurrentHashMap;
@@ -120,7 +121,7 @@ public abstract class FSUtils {
    */
   public static void setStoragePolicy(final FileSystem fs, final Configuration conf,
       final Path path, final String policyKey, final String defaultPolicy) {
-    String storagePolicy = conf.get(policyKey, defaultPolicy).toUpperCase();
+    String storagePolicy = conf.get(policyKey, defaultPolicy).toUpperCase(Locale.ROOT);
     if (storagePolicy.equals(defaultPolicy)) {
       if (LOG.isTraceEnabled()) {
         LOG.trace("default policy of " + defaultPolicy + " requested, exiting early.");
@@ -1910,7 +1911,7 @@ public abstract class FSUtils {
       return false;
     }
 
-    if (!regionName.toLowerCase().matches("[0-9a-f]+")) {
+    if (!regionName.toLowerCase(Locale.ROOT).matches("[0-9a-f]+")) {
       return false;
     }
     return true;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
index c5a724a1881..3eef755e2a7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
@@ -44,6 +44,7 @@ import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
@@ -4070,7 +4071,7 @@ public class HBaseFsck extends Configured implements Closeable {
         errors.progress();
         String encodedName = regionDir.getPath().getName();
         // ignore directories that aren't hexadecimal
-        if (!encodedName.toLowerCase().matches("[0-9a-f]+")) {
+        if (!encodedName.toLowerCase(Locale.ROOT).matches("[0-9a-f]+")) {
           continue;
         }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerCommandLine.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerCommandLine.java
index 983d49c9d20..e6b746c9081 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerCommandLine.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/ServerCommandLine.java
@@ -22,6 +22,7 @@ import java.lang.management.ManagementFactory;
 import java.lang.management.RuntimeMXBean;
 import java.util.Arrays;
 import java.util.HashSet;
+import java.util.Locale;
 import java.util.Map.Entry;
 import java.util.Set;
 
@@ -103,8 +104,8 @@ public abstract class ServerCommandLine extends Configured implements Tool {
     nextEnv:
     for (Entry<String, String> entry : System.getenv().entrySet()) {
-      String key = entry.getKey().toLowerCase();
-      String value = entry.getValue().toLowerCase();
+      String key = entry.getKey().toLowerCase(Locale.ROOT);
+      String value = entry.getValue().toLowerCase(Locale.ROOT);
       // exclude variables which may contain skip words
       for(String skipWord : skipWords) {
         if (key.contains(skipWord) || value.contains(skipWord))
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
index 12f50733013..aedfd1c4397 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/PerformanceEvaluation.java
@@ -31,6 +31,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Date;
 import java.util.LinkedList;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Queue;
 import java.util.Random;
@@ -312,8 +313,8 @@ public class PerformanceEvaluation extends Configured implements Tool {
   static boolean checkTable(Admin admin, TestOptions opts) throws IOException {
     TableName tableName = TableName.valueOf(opts.tableName);
     boolean needsDelete = false, exists = admin.tableExists(tableName);
-    boolean isReadCmd = opts.cmdName.toLowerCase().contains("read")
-      || opts.cmdName.toLowerCase().contains("scan");
+    boolean isReadCmd = opts.cmdName.toLowerCase(Locale.ROOT).contains("read")
+      || opts.cmdName.toLowerCase(Locale.ROOT).contains("scan");
     if (!exists && isReadCmd) {
       throw new IllegalStateException(
         "Must specify an existing table for read commands. Run a write command first.");
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java
index c9ab40d25fe..6b0b5381bb8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestIPv6NIOServerSocketChannel.java
@@ -24,6 +24,7 @@ import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.net.ServerSocket;
 import java.nio.channels.ServerSocketChannel;
+import java.util.Locale;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -123,7 +124,7 @@ public class TestIPv6NIOServerSocketChannel {
       //java.net.SocketException: Address family not supported by protocol family
       //or java.net.SocketException: Protocol family not supported
       Assert.assertFalse(ex.getClass().isInstance(BindException.class));
-      Assert.assertTrue(ex.getMessage().toLowerCase().contains("protocol family"));
+      Assert.assertTrue(ex.getMessage().toLowerCase(Locale.ROOT).contains("protocol family"));
       LOG.info("Received expected exception:");
       LOG.info(ex);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TimedOutTestsListener.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TimedOutTestsListener.java
index 9b3784d0577..5beeace8e57 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TimedOutTestsListener.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TimedOutTestsListener.java
@@ -27,6 +27,7 @@ import java.lang.management.ThreadMXBean;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
 import java.util.Date;
+import java.util.Locale;
 import java.util.Map;
 
 import org.junit.runner.notification.Failure;
@@ -93,7 +94,7 @@ public class TimedOutTestsListener extends RunListener {
           thread.getPriority(),
           thread.getId(),
           Thread.State.WAITING.equals(thread.getState()) ?
-              "in Object.wait()" : thread.getState().name().toLowerCase(),
+              "in Object.wait()" : thread.getState().name().toLowerCase(Locale.ROOT),
           Thread.State.WAITING.equals(thread.getState()) ?
               "WAITING (on object monitor)" : thread.getState()));
       for (StackTraceElement stackTraceElement : e.getValue()) {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java
index 174cc1f1241..a502e194fa4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatTestBase.java
@@ -47,6 +47,7 @@ import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.NavigableMap;
 
@@ -230,8 +231,8 @@ public abstract class MultiTableInputFormatTestBase {
   private void testScan(String start, String stop, String last)
       throws IOException, InterruptedException, ClassNotFoundException {
     String jobName =
-        "Scan" + (start != null ? start.toUpperCase() : "Empty") + "To" +
-            (stop != null ? stop.toUpperCase() : "Empty");
+        "Scan" + (start != null ? start.toUpperCase(Locale.ROOT) : "Empty") + "To" +
+            (stop != null ? stop.toUpperCase(Locale.ROOT) : "Empty");
     LOG.info("Before map/reduce startup - job " + jobName);
     Configuration c = new Configuration(TEST_UTIL.getConfiguration());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
index 4f1fc0fbd04..e200442a9a7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
@@ -24,6 +24,7 @@ import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
 import java.io.IOException;
+import java.util.Locale;
 import java.util.TreeMap;
 
 import org.apache.hadoop.conf.Configuration;
@@ -385,7 +386,7 @@ public class TestLoadIncrementalHFiles {
     // set real family name to upper case in purpose to simulate the case that
     // family name in HFiles is invalid
     HColumnDescriptor family =
-        new HColumnDescriptor(Bytes.toBytes(new String(FAMILY).toUpperCase()));
+        new HColumnDescriptor(Bytes.toBytes(new String(FAMILY).toUpperCase(Locale.ROOT)));
     htd.addFamily(family);
 
     try {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableInputFormat.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableInputFormat.java
index 8a4a24497cd..c5821a85de4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableInputFormat.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultiTableInputFormat.java
@@ -24,6 +24,7 @@ import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.NavigableMap;
 
@@ -206,8 +207,8 @@ public class TestMultiTableInputFormat {
   private void testScan(String start, String stop, String last)
       throws IOException, InterruptedException, ClassNotFoundException {
     String jobName =
-        "Scan" + (start != null ? start.toUpperCase() : "Empty") + "To" +
-            (stop != null ? stop.toUpperCase() : "Empty");
+        "Scan" + (start != null ? start.toUpperCase(Locale.ROOT) : "Empty") + "To" +
+            (stop != null ? stop.toUpperCase(Locale.ROOT) : "Empty");
     LOG.info("Before map/reduce startup - job " + jobName);
     Configuration c = new Configuration(TEST_UTIL.getConfiguration());
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.java
index ab53e3e9125..6521d2dd4f1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormatScanBase.java
@@ -23,6 +23,7 @@ import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.NavigableMap;
 
@@ -177,8 +178,8 @@ public abstract class TestTableInputFormatScanBase {
    */
   protected void testScanFromConfiguration(String start, String stop, String last)
       throws IOException, InterruptedException, ClassNotFoundException {
-    String jobName = "ScanFromConfig" + (start != null ? start.toUpperCase() : "Empty") +
-      "To" + (stop != null ? stop.toUpperCase() : "Empty");
+    String jobName = "ScanFromConfig" + (start != null ? start.toUpperCase(Locale.ROOT) : "Empty") +
+      "To" + (stop != null ? stop.toUpperCase(Locale.ROOT) : "Empty");
     Configuration c = new Configuration(TEST_UTIL.getConfiguration());
     c.set(TableInputFormat.INPUT_TABLE, Bytes.toString(TABLE_NAME));
     c.set(TableInputFormat.SCAN_COLUMN_FAMILY, Bytes.toString(INPUT_FAMILY));
@@ -214,8 +215,8 @@ public abstract class TestTableInputFormatScanBase {
    */
   protected void testScan(String start, String stop, String last)
       throws IOException, InterruptedException, ClassNotFoundException {
-    String jobName = "Scan" + (start != null ? start.toUpperCase() : "Empty") +
-      "To" + (stop != null ? stop.toUpperCase() : "Empty");
+    String jobName = "Scan" + (start != null ? start.toUpperCase(Locale.ROOT) : "Empty") +
+      "To" + (stop != null ? stop.toUpperCase(Locale.ROOT) : "Empty");
     LOG.info("Before map/reduce startup - job " + jobName);
     Configuration c = new Configuration(TEST_UTIL.getConfiguration());
     Scan scan = new Scan();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java
index 1927334d8c0..0c9a30fc935 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/DataBlockEncodingTool.java
@@ -24,6 +24,7 @@ import java.text.DecimalFormat;
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
@@ -142,7 +143,7 @@ public class DataBlockEncodingTool {
       String s = super.toString();
       StringBuilder sb = new StringBuilder();
       sb.append(s.charAt(0));
-      sb.append(s.substring(1).toLowerCase());
+      sb.append(s.substring(1).toLowerCase(Locale.ROOT));
       return sb.toString();
     }
   }
@@ -372,7 +373,7 @@ public class DataBlockEncodingTool {
   private void benchmarkDefaultCompression(int totalSize, byte[] rawBuffer)
       throws IOException {
     benchmarkAlgorithm(compressionAlgorithm,
-        compressionAlgorithmName.toUpperCase(), rawBuffer, 0, totalSize);
+        compressionAlgorithmName.toUpperCase(Locale.ROOT), rawBuffer, 0, totalSize);
   }
 
   /**
@@ -526,7 +527,7 @@ public class DataBlockEncodingTool {
    * @throws IOException
   */
   public void displayStatistics() throws IOException {
-    final String comprAlgo = compressionAlgorithmName.toUpperCase();
+    final String comprAlgo = compressionAlgorithmName.toUpperCase(Locale.ROOT);
     long rawBytes = totalKeyLength + totalPrefixLength + totalValueLength;
 
     System.out.println("Raw data size:");
@@ -694,7 +695,7 @@ public class DataBlockEncodingTool {
     String compressionName = DEFAULT_COMPRESSION.getName();
     if (cmd.hasOption(OPT_ENCODING_ALGORITHM)) {
       compressionName =
-          cmd.getOptionValue(OPT_ENCODING_ALGORITHM).toLowerCase();
+          cmd.getOptionValue(OPT_ENCODING_ALGORITHM).toLowerCase(Locale.ROOT);
     }
     boolean doBenchmark = cmd.hasOption(OPT_MEASURE_THROUGHPUT);
     boolean doVerify = !cmd.hasOption(OPT_OMIT_CORRECTNESS_TEST);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
index 74a99837b30..b1b35c943e9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java
@@ -24,6 +24,7 @@ import java.security.SecureRandom;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
+import java.util.Locale;
 import java.util.Properties;
 import java.util.Random;
 import java.util.concurrent.atomic.AtomicReference;
@@ -124,7 +125,7 @@ public class LoadTestTool extends AbstractHBaseTool {
   public static final String OPT_DEFERRED_LOG_FLUSH_USAGE = "Enable deferred log flush.";
 
   public static final String OPT_DATA_BLOCK_ENCODING =
-      HColumnDescriptor.DATA_BLOCK_ENCODING.toLowerCase();
+      HColumnDescriptor.DATA_BLOCK_ENCODING.toLowerCase(Locale.ROOT);
 
   public static final String OPT_INMEMORY = "in_memory";
   public static final String OPT_USAGE_IN_MEMORY = "Tries to keep the HFiles of the CF " +
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/HThreadedSelectorServerArgs.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/HThreadedSelectorServerArgs.java
index 5fbde7a5729..da33cc0a92c 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/HThreadedSelectorServerArgs.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/HThreadedSelectorServerArgs.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.thrift;
 
+import java.util.Locale;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -79,7 +80,7 @@ public class HThreadedSelectorServerArgs extends TThreadedSelectorServer.Args {
     int acceptQueueSizePerThread = conf.getInt(
         ACCEPT_QUEUE_SIZE_PER_THREAD_CONF_KEY, getAcceptQueueSizePerThread());
     AcceptPolicy acceptPolicy = AcceptPolicy.valueOf(conf.get(
-        ACCEPT_POLICY_CONF_KEY, getAcceptPolicy().toString()).toUpperCase());
+        ACCEPT_POLICY_CONF_KEY, getAcceptPolicy().toString()).toUpperCase(Locale.ROOT));
 
     super.selectorThreads(selectorThreads)
         .workerThreads(workerThreads)
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
index 57c237ef646..8aec6c52533 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
@@ -23,6 +23,7 @@ import static org.apache.hadoop.hbase.util.Bytes.getBytes;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 import java.util.TreeMap;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -59,7 +60,7 @@ public class ThriftUtilities {
   static public HColumnDescriptor colDescFromThrift(ColumnDescriptor in)
       throws IllegalArgument {
     Compression.Algorithm comp =
-        Compression.getCompressionAlgorithmByName(in.compression.toLowerCase());
+        Compression.getCompressionAlgorithmByName(in.compression.toLowerCase(Locale.ROOT));
     BloomType bt =
        BloomType.valueOf(in.bloomFilterType);
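
Reviewer note, not part of the patch: the change is mechanical. Every toLowerCase()/toUpperCase() whose result feeds an enum valueOf, a config key, a Kerberos hostname, or a hex-name match is pinned to Locale.ROOT so the result no longer depends on the JVM's default locale. Below is a minimal standalone sketch of the failure mode this guards against, the well-known Turkish dotted/dotless "i" problem (class and variable names here are illustrative only, not from the patch):

import java.util.Locale;

public class LocaleRootDemo {
  public static void main(String[] args) {
    String codec = "gzip";
    // Under a Turkish default locale, 'i' upper-cases to the dotted capital I
    // (U+0130), producing "GZİP". An enum lookup such as
    // Compression.Algorithm.valueOf(name.toUpperCase()) would then throw
    // IllegalArgumentException for a perfectly valid codec name.
    System.out.println(codec.toUpperCase(new Locale("tr", "TR"))); // GZİP
    // Locale.ROOT applies locale-neutral case mapping, so ASCII identifiers
    // round-trip exactly as the calling code expects.
    System.out.println(codec.toUpperCase(Locale.ROOT)); // GZIP
  }
}

The lower-casing direction breaks the same way: "PRIORITY".toLowerCase() becomes "prıorıty" (dotless ı, U+0131) under tr_TR, which is why a check like name.toLowerCase().contains("priority") in RpcExecutor could silently stop matching before this patch.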