HBASE-15889. String case conversions are locale-sensitive, used without locale

Signed-off-by: Sean Busbey <busbey@apache.org>
Author: Sean Mackrory (2016-05-25 07:49:01 -06:00), committed by Sean Busbey
Parent: 6d0e8b2a91
Commit: 878b1ea721

39 changed files with 111 additions and 72 deletions
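
For context on the class of bug this patch fixes: String.toLowerCase() and
String.toUpperCase() without an explicit Locale argument use the JVM's default
locale, so case-normalized identifiers that feed equals() comparisons or
Enum.valueOf() lookups can change shape depending on the machine's locale. The
classic trap is Turkish, where 'I' lowercases to the dotless 'ı' and 'i'
uppercases to the dotted 'İ'. Passing Locale.ROOT makes the conversion
locale-independent, which is the mechanical change applied across all 39 files
below. A minimal illustration (LocaleCaseDemo is a hypothetical sketch, not
part of the patch):

import java.util.Locale;

public class LocaleCaseDemo {
  public static void main(String[] args) {
    // Simulate a JVM whose default locale is Turkish.
    Locale.setDefault(new Locale("tr", "TR"));

    // Locale-sensitive conversions: the letter I does not round-trip.
    System.out.println("GZIP".toLowerCase());            // prints "gzıp"
    System.out.println("gzip".toUpperCase());            // prints "GZİP"

    // Locale-insensitive conversions, as used throughout this patch.
    System.out.println("GZIP".toLowerCase(Locale.ROOT)); // prints "gzip"
    System.out.println("gzip".toUpperCase(Locale.ROOT)); // prints "GZIP"

    // The failure mode this commit prevents: an enum lookup such as
    // SomeEnum.valueOf(name.toUpperCase()) throws IllegalArgumentException
    // on a Turkish-locale machine whenever the name contains an 'i',
    // even though the input is well-formed.
  }
}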


@@ -21,6 +21,7 @@ import com.sun.javadoc.DocErrorReporter;
 
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 
 class StabilityOptions {
   public static final String STABLE_OPTION = "-stable";
@@ -28,7 +29,7 @@ class StabilityOptions {
   public static final String UNSTABLE_OPTION = "-unstable";
 
   public static Integer optionLength(String option) {
-    String opt = option.toLowerCase();
+    String opt = option.toLowerCase(Locale.ROOT);
     if (opt.equals(UNSTABLE_OPTION)) return 1;
     if (opt.equals(EVOLVING_OPTION)) return 1;
     if (opt.equals(STABLE_OPTION)) return 1;
@@ -37,7 +38,7 @@ class StabilityOptions {
 
   public static void validOptions(String[][] options, DocErrorReporter reporter) {
     for (int i = 0; i < options.length; i++) {
-      String opt = options[i][0].toLowerCase();
+      String opt = options[i][0].toLowerCase(Locale.ROOT);
       if (opt.equals(UNSTABLE_OPTION)) {
         RootDocProcessor.stability = UNSTABLE_OPTION;
       } else if (opt.equals(EVOLVING_OPTION)) {


@@ -24,6 +24,7 @@ import java.io.IOException;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
@@ -477,11 +478,11 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
     setBlockCacheEnabled(blockCacheEnabled);
     setTimeToLive(timeToLive);
     setCompressionType(Compression.Algorithm.
-        valueOf(compression.toUpperCase()));
+        valueOf(compression.toUpperCase(Locale.ROOT)));
     setDataBlockEncoding(DataBlockEncoding.
-        valueOf(dataBlockEncoding.toUpperCase()));
+        valueOf(dataBlockEncoding.toUpperCase(Locale.ROOT)));
     setBloomFilterType(BloomType.
-        valueOf(bloomFilter.toUpperCase()));
+        valueOf(bloomFilter.toUpperCase(Locale.ROOT)));
     setBlocksize(blocksize);
     setScope(scope);
   }
@@ -603,7 +604,7 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
     if (n == null) {
       return Compression.Algorithm.NONE;
     }
-    return Compression.Algorithm.valueOf(n.toUpperCase());
+    return Compression.Algorithm.valueOf(n.toUpperCase(Locale.ROOT));
   }
 
   /** @return compression type being used for the column family for major
@@ -613,7 +614,7 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
     if (n == null) {
       return getCompression();
     }
-    return Compression.Algorithm.valueOf(n.toUpperCase());
+    return Compression.Algorithm.valueOf(n.toUpperCase(Locale.ROOT));
   }
 
   /** @return maximum number of versions */
@@ -708,7 +709,7 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
    * @return this (for chained invocation)
    */
   public HColumnDescriptor setCompressionType(Compression.Algorithm type) {
-    return setValue(COMPRESSION, type.getName().toUpperCase());
+    return setValue(COMPRESSION, type.getName().toUpperCase(Locale.ROOT));
   }
 
   /**
@@ -820,7 +821,7 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
    */
   public HColumnDescriptor setCompactionCompressionType(
       Compression.Algorithm type) {
-    return setValue(COMPRESSION_COMPACT, type.getName().toUpperCase());
+    return setValue(COMPRESSION_COMPACT, type.getName().toUpperCase(Locale.ROOT));
   }
 
   /**
@@ -847,7 +848,7 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
     String value = getValue(KEEP_DELETED_CELLS);
     if (value != null) {
       // toUpperCase for backwards compatibility
-      return KeepDeletedCells.valueOf(value.toUpperCase());
+      return KeepDeletedCells.valueOf(value.toUpperCase(Locale.ROOT));
     }
     return DEFAULT_KEEP_DELETED;
   }
@@ -937,7 +938,7 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
     if (n == null) {
       n = DEFAULT_BLOOMFILTER;
     }
-    return BloomType.valueOf(n.toUpperCase());
+    return BloomType.valueOf(n.toUpperCase(Locale.ROOT));
   }
 
   /**


@@ -32,6 +32,7 @@ import org.apache.hadoop.hbase.util.Bytes;
 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 import java.util.regex.Pattern;
 
 /**
@@ -229,7 +230,7 @@ public class ServerName implements Comparable<ServerName>, Serializable {
    */
  static String getServerName(String hostName, int port, long startcode) {
    final StringBuilder name = new StringBuilder(hostName.length() + 1 + 5 + 1 + 13);
-   name.append(hostName.toLowerCase());
+   name.append(hostName.toLowerCase(Locale.ROOT));
    name.append(SERVERNAME_SEPARATOR);
    name.append(port);
    name.append(SERVERNAME_SEPARATOR);


@@ -19,6 +19,7 @@
 package org.apache.hadoop.hbase.filter;
 
 import com.google.protobuf.InvalidProtocolBufferException;
+import java.util.Locale;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -52,8 +53,8 @@ public class SubstringComparator extends ByteArrayComparable {
    * @param substr the substring
    */
   public SubstringComparator(String substr) {
-    super(Bytes.toBytes(substr.toLowerCase()));
-    this.substr = substr.toLowerCase();
+    super(Bytes.toBytes(substr.toLowerCase(Locale.ROOT)));
+    this.substr = substr.toLowerCase(Locale.ROOT);
   }
 
   @Override
@@ -63,7 +64,7 @@ public class SubstringComparator extends ByteArrayComparable {
   @Override
   public int compareTo(byte[] value, int offset, int length) {
-    return Bytes.toString(value, offset, length).toLowerCase().contains(substr) ? 0
+    return Bytes.toString(value, offset, length).toLowerCase(Locale.ROOT).contains(substr) ? 0
         : 1;
   }


@@ -27,6 +27,7 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Random;
 import java.util.concurrent.TimeUnit;
@@ -250,7 +251,7 @@ public class AsyncRpcChannel {
     return new SaslClientHandler(realTicket, authMethod, token, serverPrincipal,
         client.fallbackAllowed,
         client.conf.get("hbase.rpc.protection",
-          SaslUtil.QualityOfProtection.AUTHENTICATION.name().toLowerCase()),
+          SaslUtil.QualityOfProtection.AUTHENTICATION.name().toLowerCase(Locale.ROOT)),
         new SaslClientHandler.SaslExceptionHandler() {
           @Override
           public void handle(int retryCount, Random random, Throwable cause) {
@@ -458,7 +459,7 @@ public class AsyncRpcChannel {
       throw new IOException("Can't obtain server Kerberos config key from SecurityInfo");
     }
     this.serverPrincipal = SecurityUtil.getServerPrincipal(client.conf.get(serverKey),
-      address.getAddress().getCanonicalHostName().toLowerCase());
+      address.getAddress().getCanonicalHostName().toLowerCase(Locale.ROOT));
     if (LOG.isDebugEnabled()) {
       LOG.debug("RPC Server Kerberos principal name for service=" + serviceName + " is "
           + serverPrincipal);


@@ -38,6 +38,7 @@ import java.security.PrivilegedExceptionAction;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Random;
@@ -332,7 +333,7 @@ public class RpcClientImpl extends AbstractRpcClient {
             "Can't obtain server Kerberos config key from SecurityInfo");
       }
       serverPrincipal = SecurityUtil.getServerPrincipal(
-        conf.get(serverKey), server.getAddress().getCanonicalHostName().toLowerCase());
+        conf.get(serverKey), server.getAddress().getCanonicalHostName().toLowerCase(Locale.ROOT));
       if (LOG.isDebugEnabled()) {
         LOG.debug("RPC Server Kerberos principal name for service="
             + remoteId.getServiceName() + " is " + serverPrincipal);
@@ -613,7 +614,7 @@ public class RpcClientImpl extends AbstractRpcClient {
         final OutputStream out2) throws IOException {
       saslRpcClient = new HBaseSaslRpcClient(authMethod, token, serverPrincipal, fallbackAllowed,
           conf.get("hbase.rpc.protection",
-            QualityOfProtection.AUTHENTICATION.name().toLowerCase()));
+            QualityOfProtection.AUTHENTICATION.name().toLowerCase(Locale.ROOT)));
       return saslRpcClient.saslConnect(in2, out2);
     }


@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.security;
 import org.apache.commons.codec.binary.Base64;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 
+import java.util.Locale;
 import java.util.Map;
 import java.util.TreeMap;
@@ -79,13 +80,13 @@ public class SaslUtil {
    */
   public static QualityOfProtection getQop(String stringQop) {
     QualityOfProtection qop = null;
-    if (QualityOfProtection.AUTHENTICATION.name().toLowerCase().equals(stringQop)
+    if (QualityOfProtection.AUTHENTICATION.name().toLowerCase(Locale.ROOT).equals(stringQop)
         || QualityOfProtection.AUTHENTICATION.saslQop.equals(stringQop)) {
       qop = QualityOfProtection.AUTHENTICATION;
-    } else if (QualityOfProtection.INTEGRITY.name().toLowerCase().equals(stringQop)
+    } else if (QualityOfProtection.INTEGRITY.name().toLowerCase(Locale.ROOT).equals(stringQop)
         || QualityOfProtection.INTEGRITY.saslQop.equals(stringQop)) {
       qop = QualityOfProtection.INTEGRITY;
-    } else if (QualityOfProtection.PRIVACY.name().toLowerCase().equals(stringQop)
+    } else if (QualityOfProtection.PRIVACY.name().toLowerCase(Locale.ROOT).equals(stringQop)
         || QualityOfProtection.PRIVACY.saslQop.equals(stringQop)) {
       qop = QualityOfProtection.PRIVACY;
     }


@@ -23,6 +23,7 @@ import java.util.Collection;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
@@ -254,7 +255,7 @@ public class PoolMap<K, V> implements Map<K, V> {
     }
 
     public static String fuzzyNormalize(String name) {
-      return name != null ? name.replaceAll("-", "").trim().toLowerCase() : "";
+      return name != null ? name.replaceAll("-", "").trim().toLowerCase(Locale.ROOT) : "";
     }
 
     public static PoolType fuzzyMatch(String name) {


@@ -26,6 +26,7 @@ import java.net.SocketTimeoutException;
 import java.net.UnknownHostException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -409,7 +410,7 @@ public class MetaTableLocator {
       } else if (cause != null && cause instanceof EOFException) {
         // Catch. Other end disconnected us.
       } else if (cause != null && cause.getMessage() != null &&
-          cause.getMessage().toLowerCase().contains("connection reset")) {
+          cause.getMessage().toLowerCase(Locale.ROOT).contains("connection reset")) {
         // Catch. Connection reset.
       } else {
         throw ioe;


@@ -30,6 +30,7 @@ import java.security.KeyStoreException;
 import java.security.NoSuchAlgorithmException;
 import java.security.UnrecoverableKeyException;
 import java.security.cert.CertificateException;
+import java.util.Locale;
 import java.util.Properties;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -142,7 +143,7 @@ public class KeyStoreKeyProvider implements KeyProvider {
         throw new RuntimeException("KeyProvider scheme should specify KeyStore type");
       }
       // KeyStore expects instance type specifications in uppercase
-      store = KeyStore.getInstance(storeType.toUpperCase());
+      store = KeyStore.getInstance(storeType.toUpperCase(Locale.ROOT));
       processParameters(uri);
       load(uri);
     } catch (URISyntaxException e) {


@@ -20,6 +20,7 @@
 package org.apache.hadoop.hbase.ipc;
 
 import java.util.HashMap;
+import java.util.Locale;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -48,7 +49,7 @@ public class MetricsHBaseServerSourceFactoryImpl extends MetricsHBaseServerSourc
         source = new MetricsHBaseServerSourceImpl(
             context,
             METRICS_DESCRIPTION,
-            context.toLowerCase(),
+            context.toLowerCase(Locale.ROOT),
             context + METRICS_JMX_CONTEXT_SUFFIX, wrap);
 
         //Store back in storage


@@ -29,6 +29,7 @@ import org.apache.hadoop.metrics2.MetricsTag;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 
 import java.util.HashMap;
+import java.util.Locale;
 import java.util.Map;
 
 import static org.junit.Assert.*;
@@ -246,6 +247,6 @@ public class MetricsAssertHelperImpl implements MetricsAssertHelper {
   }
 
   private String canonicalizeMetricName(String in) {
-    return in.toLowerCase().replaceAll("[^A-Za-z0-9 ]", "");
+    return in.toLowerCase(Locale.ROOT).replaceAll("[^A-Za-z0-9 ]", "");
   }
 }


@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase;
 import java.io.File;
 import java.io.IOException;
+import java.util.Locale;
 import java.util.Map;
 
 import org.apache.commons.lang.StringUtils;
@@ -205,7 +206,7 @@ public class HBaseClusterManager extends Configured implements ClusterManager {
     @Override
     public String getCommand(ServiceType service, Operation op) {
       return String.format("%s/bin/hbase-daemon.sh %s %s %s", hbaseHome, confDir,
-          op.toString().toLowerCase(), service);
+          op.toString().toLowerCase(Locale.ROOT), service);
     }
   }
@@ -235,7 +236,7 @@ public class HBaseClusterManager extends Configured implements ClusterManager {
     @Override
     public String getCommand(ServiceType service, Operation op) {
       return String.format("%s/sbin/hadoop-daemon.sh %s %s %s", hadoopHome, confDir,
-          op.toString().toLowerCase(), service);
+          op.toString().toLowerCase(Locale.ROOT), service);
     }
   }
@@ -264,7 +265,7 @@ public class HBaseClusterManager extends Configured implements ClusterManager {
     @Override
     public String getCommand(ServiceType service, Operation op) {
-      return String.format("%s/bin/zkServer.sh %s", zookeeperHome, op.toString().toLowerCase());
+      return String.format("%s/bin/zkServer.sh %s", zookeeperHome, op.toString().toLowerCase(Locale.ROOT));
     }
 
     @Override


@@ -37,6 +37,7 @@ import javax.xml.ws.http.HTTPException;
 import java.io.IOException;
 import java.net.URI;
 import java.util.HashMap;
+import java.util.Locale;
 import java.util.Map;
 
 /**
@@ -274,8 +275,8 @@ public class RESTApiClusterManager extends Configured implements ClusterManager
           if (role.get("hostRef").get("hostId").getTextValue().equals(hostId) &&
               role.get("type")
                   .getTextValue()
-                  .toLowerCase()
-                  .equals(roleType.toLowerCase())) {
+                  .toLowerCase(Locale.ROOT)
+                  .equals(roleType.toLowerCase(Locale.ROOT))) {
             roleValue = role.get(property).getTextValue();
             break;
           }
@@ -328,7 +329,7 @@ public class RESTApiClusterManager extends Configured implements ClusterManager
     // APIs tend to take commands in lowercase, so convert them to save the trouble later.
     @Override
     public String toString() {
-      return name().toLowerCase();
+      return name().toLowerCase(Locale.ROOT);
     }
   }
@@ -348,4 +349,4 @@
   private enum Service {
     HBASE, HDFS, MAPREDUCE
   }
 }


@@ -19,6 +19,7 @@
 package org.apache.hadoop.hbase;
 
 import java.io.IOException;
+import java.util.Locale;
 import java.util.Set;
 
 import org.apache.commons.cli.CommandLine;
@@ -111,7 +112,7 @@ public class StripeCompactionsPerformanceEvaluation extends AbstractHBaseTool {
     } else {
       minValueSize = maxValueSize = Integer.parseInt(valueSize);
     }
-    String datagen = cmd.getOptionValue(DATAGEN_KEY, "default").toLowerCase();
+    String datagen = cmd.getOptionValue(DATAGEN_KEY, "default").toLowerCase(Locale.ROOT);
     if ("default".equals(datagen)) {
       dataGen = new MultiThreadedAction.DefaultDataGenerator(
           minValueSize, maxValueSize, 1, 1, new byte[][] { COLUMN_FAMILY });


@@ -22,6 +22,7 @@ package org.apache.hadoop.hbase.rest.filter;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.util.HashSet;
+import java.util.Locale;
 import java.util.Set;
 import java.util.StringTokenizer;
@@ -65,11 +66,11 @@ public class GzipFilter implements Filter {
     String acceptEncoding = request.getHeader("accept-encoding");
     String contentType = request.getHeader("content-type");
     if ((contentEncoding != null) &&
-        (contentEncoding.toLowerCase().indexOf("gzip") > -1)) {
+        (contentEncoding.toLowerCase(Locale.ROOT).indexOf("gzip") > -1)) {
       request = new GZIPRequestWrapper(request);
     }
     if (((acceptEncoding != null) &&
-        (acceptEncoding.toLowerCase().indexOf("gzip") > -1)) ||
+        (acceptEncoding.toLowerCase(Locale.ROOT).indexOf("gzip") > -1)) ||
         ((contentType != null) && mimeTypes.contains(contentType))) {
       response = new GZIPResponseWrapper(response);
     }


@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.ipc;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.ThreadLocalRandom;
 import java.util.concurrent.atomic.AtomicInteger;
@@ -221,7 +222,7 @@ public abstract class RpcExecutor {
    */
   public void resizeQueues(Configuration conf) {
     String configKey = RpcScheduler.IPC_SERVER_MAX_CALLQUEUE_LENGTH;
-    if (name != null && name.toLowerCase().contains("priority")) {
+    if (name != null && name.toLowerCase(Locale.ROOT).contains("priority")) {
       configKey = RpcScheduler.IPC_SERVER_PRIORITY_MAX_CALLQUEUE_LENGTH;
     }
     currentQueueLimit = conf.getInt(configKey, currentQueueLimit);


@@ -28,6 +28,7 @@ import java.lang.reflect.Method;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.TreeMap;
 import java.util.UUID;
@@ -391,7 +392,7 @@ public class Import {
       filter = instantiateFilter(conf);
       String durabilityStr = conf.get(WAL_DURABILITY);
       if(durabilityStr != null){
-        durability = Durability.valueOf(durabilityStr.toUpperCase());
+        durability = Durability.valueOf(durabilityStr.toUpperCase(Locale.ROOT));
       }
       // TODO: This is kind of ugly doing setup of ZKW just to read the clusterid.
       ZooKeeperWatcher zkw = null;


@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.mapreduce;
 import java.io.IOException;
 import java.util.Collections;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -253,7 +254,7 @@ implements Configurable {
   @Override
   public List<InputSplit> getSplits(JobContext context) throws IOException {
     List<InputSplit> splits = super.getSplits(context);
-    if ((conf.get(SHUFFLE_MAPS) != null) && "true".equals(conf.get(SHUFFLE_MAPS).toLowerCase())) {
+    if ((conf.get(SHUFFLE_MAPS) != null) && "true".equals(conf.get(SHUFFLE_MAPS).toLowerCase(Locale.ROOT))) {
       Collections.shuffle(splits);
     }
     return splits;


@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.regionserver;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 import java.util.UUID;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -61,7 +62,7 @@ implements RowProcessor<S,T> {
   @Override
   public String getName() {
-    return this.getClass().getSimpleName().toLowerCase();
+    return this.getClass().getSimpleName().toLowerCase(Locale.ROOT);
   }
 
   @Override


@@ -34,6 +34,7 @@ import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.NavigableMap;
@@ -8433,7 +8434,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
     }
     boolean majorCompact = false;
     if (args.length > 1) {
-      if (!args[1].toLowerCase().startsWith("major")) {
+      if (!args[1].toLowerCase(Locale.ROOT).startsWith("major")) {
         printUsageAndExit("ERROR: Unrecognized option <" + args[1] + ">");
       }
       majorCompact = true;


@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.security;
 import java.io.ByteArrayInputStream;
 import java.io.DataInputStream;
 import java.io.IOException;
+import java.util.Locale;
 
 import javax.security.auth.callback.Callback;
 import javax.security.auth.callback.CallbackHandler;
@@ -50,7 +51,7 @@ public class HBaseSaslRpcServer {
   public static void init(Configuration conf) {
     SaslUtil.initSaslProperties(conf.get("hbase.rpc.protection",
-        QualityOfProtection.AUTHENTICATION.name().toLowerCase()));
+        QualityOfProtection.AUTHENTICATION.name().toLowerCase(Locale.ROOT)));
   }
 
   public static <T extends TokenIdentifier> T getIdentifier(String id,


@@ -26,6 +26,7 @@ import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
 
 import java.util.Arrays;
+import java.util.Locale;
 
 /**
@@ -66,7 +67,7 @@ public class CreateSnapshot extends AbstractHBaseTool {
             admin = connection.getAdmin();
             HBaseProtos.SnapshotDescription.Type type = HBaseProtos.SnapshotDescription.Type.FLUSH;
             if (snapshotType != null) {
-                type = HBaseProtos.SnapshotDescription.Type.valueOf(snapshotName.toUpperCase());
+                type = HBaseProtos.SnapshotDescription.Type.valueOf(snapshotName.toUpperCase(Locale.ROOT));
             }
             admin.snapshot(snapshotName, TableName.valueOf(tableName), type);


@@ -19,6 +19,7 @@
 package org.apache.hadoop.hbase.util;
 
 import java.io.IOException;
+import java.util.Locale;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
@@ -53,7 +54,7 @@ public class CompressionTest {
   private static final Log LOG = LogFactory.getLog(CompressionTest.class);
 
   public static boolean testCompression(String codec) {
-    codec = codec.toLowerCase();
+    codec = codec.toLowerCase(Locale.ROOT);
 
     Compression.Algorithm a;
@@ -109,7 +110,7 @@ public class CompressionTest {
     System.err.println(
       "Usage: CompressionTest <path> " +
-      StringUtils.join( Compression.Algorithm.values(), "|").toLowerCase() +
+      StringUtils.join( Compression.Algorithm.values(), "|").toLowerCase(Locale.ROOT) +
       "\n" +
       "For example:\n" +
       "  hbase " + CompressionTest.class + " file:///tmp/testfile gz\n");


@@ -25,6 +25,7 @@ import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.nio.ByteBuffer;
 import java.util.List;
+import java.util.Locale;
 
 import javax.management.JMException;
 import javax.management.MBeanServer;
@@ -87,7 +88,7 @@ public class DirectMemoryUtils {
     long multiplier = 1; //for the byte case.
     for (String s : arguments) {
       if (s.contains("-XX:MaxDirectMemorySize=")) {
-        String memSize = s.toLowerCase()
+        String memSize = s.toLowerCase(Locale.ROOT)
             .replace("-xx:maxdirectmemorysize=", "").trim();
 
         if (memSize.contains("k")) {


@@ -35,6 +35,7 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.concurrent.ArrayBlockingQueue;
 import java.util.concurrent.ConcurrentHashMap;
@@ -120,7 +121,7 @@ public abstract class FSUtils {
    */
   public static void setStoragePolicy(final FileSystem fs, final Configuration conf,
       final Path path, final String policyKey, final String defaultPolicy) {
-    String storagePolicy = conf.get(policyKey, defaultPolicy).toUpperCase();
+    String storagePolicy = conf.get(policyKey, defaultPolicy).toUpperCase(Locale.ROOT);
     if (storagePolicy.equals(defaultPolicy)) {
       if (LOG.isTraceEnabled()) {
         LOG.trace("default policy of " + defaultPolicy + " requested, exiting early.");
@@ -1910,7 +1911,7 @@ public abstract class FSUtils {
       return false;
     }
 
-    if (!regionName.toLowerCase().matches("[0-9a-f]+")) {
+    if (!regionName.toLowerCase(Locale.ROOT).matches("[0-9a-f]+")) {
       return false;
     }
     return true;


@@ -44,6 +44,7 @@ import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
@@ -4070,7 +4071,7 @@ public class HBaseFsck extends Configured implements Closeable {
         errors.progress();
         String encodedName = regionDir.getPath().getName();
         // ignore directories that aren't hexadecimal
-        if (!encodedName.toLowerCase().matches("[0-9a-f]+")) {
+        if (!encodedName.toLowerCase(Locale.ROOT).matches("[0-9a-f]+")) {
          continue;
         }


@@ -22,6 +22,7 @@ import java.lang.management.ManagementFactory;
 import java.lang.management.RuntimeMXBean;
 import java.util.Arrays;
 import java.util.HashSet;
+import java.util.Locale;
 import java.util.Map.Entry;
 import java.util.Set;
@@ -103,8 +104,8 @@ public abstract class ServerCommandLine extends Configured implements Tool {
     nextEnv:
     for (Entry<String, String> entry : System.getenv().entrySet()) {
-      String key = entry.getKey().toLowerCase();
-      String value = entry.getValue().toLowerCase();
+      String key = entry.getKey().toLowerCase(Locale.ROOT);
+      String value = entry.getValue().toLowerCase(Locale.ROOT);
       // exclude variables which may contain skip words
       for(String skipWord : skipWords) {
         if (key.contains(skipWord) || value.contains(skipWord))


@@ -31,6 +31,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Date;
 import java.util.LinkedList;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Queue;
 import java.util.Random;
@@ -312,8 +313,8 @@ public class PerformanceEvaluation extends Configured implements Tool {
   static boolean checkTable(Admin admin, TestOptions opts) throws IOException {
     TableName tableName = TableName.valueOf(opts.tableName);
     boolean needsDelete = false, exists = admin.tableExists(tableName);
-    boolean isReadCmd = opts.cmdName.toLowerCase().contains("read")
-      || opts.cmdName.toLowerCase().contains("scan");
+    boolean isReadCmd = opts.cmdName.toLowerCase(Locale.ROOT).contains("read")
+      || opts.cmdName.toLowerCase(Locale.ROOT).contains("scan");
     if (!exists && isReadCmd) {
       throw new IllegalStateException(
         "Must specify an existing table for read commands. Run a write command first.");


@@ -24,6 +24,7 @@ import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.net.ServerSocket;
 import java.nio.channels.ServerSocketChannel;
+import java.util.Locale;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -123,7 +124,7 @@ public class TestIPv6NIOServerSocketChannel {
       //java.net.SocketException: Address family not supported by protocol family
       //or java.net.SocketException: Protocol family not supported
       Assert.assertFalse(ex.getClass().isInstance(BindException.class));
-      Assert.assertTrue(ex.getMessage().toLowerCase().contains("protocol family"));
+      Assert.assertTrue(ex.getMessage().toLowerCase(Locale.ROOT).contains("protocol family"));
       LOG.info("Received expected exception:");
      LOG.info(ex);


@@ -27,6 +27,7 @@ import java.lang.management.ThreadMXBean;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
 import java.util.Date;
+import java.util.Locale;
 import java.util.Map;
 
 import org.junit.runner.notification.Failure;
@@ -93,7 +94,7 @@ public class TimedOutTestsListener extends RunListener {
           thread.getPriority(),
           thread.getId(),
           Thread.State.WAITING.equals(thread.getState()) ?
-              "in Object.wait()" : thread.getState().name().toLowerCase(),
+              "in Object.wait()" : thread.getState().name().toLowerCase(Locale.ROOT),
           Thread.State.WAITING.equals(thread.getState()) ?
               "WAITING (on object monitor)" : thread.getState()));
       for (StackTraceElement stackTraceElement : e.getValue()) {


@@ -47,6 +47,7 @@ import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.NavigableMap;
@@ -230,8 +231,8 @@ public abstract class MultiTableInputFormatTestBase {
   private void testScan(String start, String stop, String last)
       throws IOException, InterruptedException, ClassNotFoundException {
     String jobName =
-        "Scan" + (start != null ? start.toUpperCase() : "Empty") + "To" +
-            (stop != null ? stop.toUpperCase() : "Empty");
+        "Scan" + (start != null ? start.toUpperCase(Locale.ROOT) : "Empty") + "To" +
+            (stop != null ? stop.toUpperCase(Locale.ROOT) : "Empty");
     LOG.info("Before map/reduce startup - job " + jobName);
     Configuration c = new Configuration(TEST_UTIL.getConfiguration());


@@ -24,6 +24,7 @@ import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
 import java.io.IOException;
+import java.util.Locale;
 import java.util.TreeMap;
 
 import org.apache.hadoop.conf.Configuration;
@@ -385,7 +386,7 @@ public class TestLoadIncrementalHFiles {
     // set real family name to upper case in purpose to simulate the case that
     // family name in HFiles is invalid
     HColumnDescriptor family =
-        new HColumnDescriptor(Bytes.toBytes(new String(FAMILY).toUpperCase()));
+        new HColumnDescriptor(Bytes.toBytes(new String(FAMILY).toUpperCase(Locale.ROOT)));
     htd.addFamily(family);
 
     try {


@@ -24,6 +24,7 @@ import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.NavigableMap;
@@ -206,8 +207,8 @@ public class TestMultiTableInputFormat {
   private void testScan(String start, String stop, String last)
       throws IOException, InterruptedException, ClassNotFoundException {
     String jobName =
-        "Scan" + (start != null ? start.toUpperCase() : "Empty") + "To" +
-            (stop != null ? stop.toUpperCase() : "Empty");
+        "Scan" + (start != null ? start.toUpperCase(Locale.ROOT) : "Empty") + "To" +
+            (stop != null ? stop.toUpperCase(Locale.ROOT) : "Empty");
     LOG.info("Before map/reduce startup - job " + jobName);
     Configuration c = new Configuration(TEST_UTIL.getConfiguration());


@@ -23,6 +23,7 @@ import static org.junit.Assert.assertTrue;
 import java.io.IOException;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.NavigableMap;
@@ -177,8 +178,8 @@ public abstract class TestTableInputFormatScanBase {
    */
   protected void testScanFromConfiguration(String start, String stop, String last)
       throws IOException, InterruptedException, ClassNotFoundException {
-    String jobName = "ScanFromConfig" + (start != null ? start.toUpperCase() : "Empty") +
-      "To" + (stop != null ? stop.toUpperCase() : "Empty");
+    String jobName = "ScanFromConfig" + (start != null ? start.toUpperCase(Locale.ROOT) : "Empty") +
+      "To" + (stop != null ? stop.toUpperCase(Locale.ROOT) : "Empty");
     Configuration c = new Configuration(TEST_UTIL.getConfiguration());
     c.set(TableInputFormat.INPUT_TABLE, Bytes.toString(TABLE_NAME));
     c.set(TableInputFormat.SCAN_COLUMN_FAMILY, Bytes.toString(INPUT_FAMILY));
@@ -214,8 +215,8 @@ public abstract class TestTableInputFormatScanBase {
    */
   protected void testScan(String start, String stop, String last)
       throws IOException, InterruptedException, ClassNotFoundException {
-    String jobName = "Scan" + (start != null ? start.toUpperCase() : "Empty") +
-      "To" + (stop != null ? stop.toUpperCase() : "Empty");
+    String jobName = "Scan" + (start != null ? start.toUpperCase(Locale.ROOT) : "Empty") +
+      "To" + (stop != null ? stop.toUpperCase(Locale.ROOT) : "Empty");
     LOG.info("Before map/reduce startup - job " + jobName);
     Configuration c = new Configuration(TEST_UTIL.getConfiguration());
     Scan scan = new Scan();


@@ -24,6 +24,7 @@ import java.text.DecimalFormat;
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
@@ -142,7 +143,7 @@ public class DataBlockEncodingTool {
       String s = super.toString();
       StringBuilder sb = new StringBuilder();
       sb.append(s.charAt(0));
-      sb.append(s.substring(1).toLowerCase());
+      sb.append(s.substring(1).toLowerCase(Locale.ROOT));
       return sb.toString();
     }
   }
@@ -372,7 +373,7 @@ public class DataBlockEncodingTool {
   private void benchmarkDefaultCompression(int totalSize, byte[] rawBuffer)
       throws IOException {
     benchmarkAlgorithm(compressionAlgorithm,
-        compressionAlgorithmName.toUpperCase(), rawBuffer, 0, totalSize);
+        compressionAlgorithmName.toUpperCase(Locale.ROOT), rawBuffer, 0, totalSize);
   }
 
   /**
@@ -526,7 +527,7 @@ public class DataBlockEncodingTool {
    * @throws IOException
    */
   public void displayStatistics() throws IOException {
-    final String comprAlgo = compressionAlgorithmName.toUpperCase();
+    final String comprAlgo = compressionAlgorithmName.toUpperCase(Locale.ROOT);
     long rawBytes = totalKeyLength + totalPrefixLength + totalValueLength;
 
     System.out.println("Raw data size:");
@@ -694,7 +695,7 @@ public class DataBlockEncodingTool {
     String compressionName = DEFAULT_COMPRESSION.getName();
     if (cmd.hasOption(OPT_ENCODING_ALGORITHM)) {
       compressionName =
-          cmd.getOptionValue(OPT_ENCODING_ALGORITHM).toLowerCase();
+          cmd.getOptionValue(OPT_ENCODING_ALGORITHM).toLowerCase(Locale.ROOT);
     }
     boolean doBenchmark = cmd.hasOption(OPT_MEASURE_THROUGHPUT);
     boolean doVerify = !cmd.hasOption(OPT_OMIT_CORRECTNESS_TEST);


@@ -24,6 +24,7 @@ import java.security.SecureRandom;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
+import java.util.Locale;
 import java.util.Properties;
 import java.util.Random;
 import java.util.concurrent.atomic.AtomicReference;
@@ -124,7 +125,7 @@ public class LoadTestTool extends AbstractHBaseTool {
   public static final String OPT_DEFERRED_LOG_FLUSH_USAGE = "Enable deferred log flush.";
 
   public static final String OPT_DATA_BLOCK_ENCODING =
-      HColumnDescriptor.DATA_BLOCK_ENCODING.toLowerCase();
+      HColumnDescriptor.DATA_BLOCK_ENCODING.toLowerCase(Locale.ROOT);
 
   public static final String OPT_INMEMORY = "in_memory";
   public static final String OPT_USAGE_IN_MEMORY = "Tries to keep the HFiles of the CF " +


@@ -19,6 +19,7 @@
 package org.apache.hadoop.hbase.thrift;
 
+import java.util.Locale;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -79,7 +80,7 @@ public class HThreadedSelectorServerArgs extends TThreadedSelectorServer.Args {
     int acceptQueueSizePerThread = conf.getInt(
         ACCEPT_QUEUE_SIZE_PER_THREAD_CONF_KEY, getAcceptQueueSizePerThread());
     AcceptPolicy acceptPolicy = AcceptPolicy.valueOf(conf.get(
-        ACCEPT_POLICY_CONF_KEY, getAcceptPolicy().toString()).toUpperCase());
+        ACCEPT_POLICY_CONF_KEY, getAcceptPolicy().toString()).toUpperCase(Locale.ROOT));
 
     super.selectorThreads(selectorThreads)
         .workerThreads(workerThreads)


@@ -23,6 +23,7 @@ import static org.apache.hadoop.hbase.util.Bytes.getBytes;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 import java.util.TreeMap;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -59,7 +60,7 @@ public class ThriftUtilities {
   static public HColumnDescriptor colDescFromThrift(ColumnDescriptor in)
       throws IllegalArgument {
     Compression.Algorithm comp =
-        Compression.getCompressionAlgorithmByName(in.compression.toLowerCase());
+        Compression.getCompressionAlgorithmByName(in.compression.toLowerCase(Locale.ROOT));
     BloomType bt =
       BloomType.valueOf(in.bloomFilterType);