HBASE-15889. String case conversions are locale-sensitive, used without locale
Signed-off-by: Sean Busbey <busbey@apache.org>
commit 878b1ea721
parent 6d0e8b2a91
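Background for reviewers: the zero-argument String#toLowerCase()/String#toUpperCase() use the JVM default locale, so case-normalized comparisons of config values, enum names, and protocol strings can stop matching their ASCII constants on machines with locale-specific casing rules. Turkish is the classic case: 'I' lower-cases to dotless 'ı' (U+0131) and 'i' upper-cases to dotted 'İ' (U+0130). The following minimal sketch (hypothetical, not part of this change) reproduces the failure mode that switching to Locale.ROOT prevents:

    import java.util.Locale;

    // Hypothetical demo class, not part of this commit.
    public class LocaleCaseDemo {
      public static void main(String[] args) {
        // Simulate running on a JVM whose default locale is Turkish.
        Locale.setDefault(new Locale("tr", "TR"));

        // SASL QOP matching as done before this patch: the lower-cased enum
        // name no longer equals the ASCII constant "integrity".
        System.out.println("INTEGRITY".toLowerCase());            // "ıntegrıty"
        System.out.println("INTEGRITY".toLowerCase(Locale.ROOT)); // "integrity"

        // Enum round-trips break the same way: a valueOf lookup would throw,
        // since the dotted capital İ turns the name into "SKİP_WAL".
        System.out.println("skip_wal".toUpperCase());             // "SKİP_WAL"
        System.out.println("skip_wal".toUpperCase(Locale.ROOT));  // "SKIP_WAL"
      }
    }

Locale.ROOT pins each conversion below to locale-neutral casing rules, which is what every call site in this patch intends.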
@@ -21,6 +21,7 @@ import com.sun.javadoc.DocErrorReporter;
 
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 
 class StabilityOptions {
   public static final String STABLE_OPTION = "-stable";
@@ -28,7 +29,7 @@ class StabilityOptions {
   public static final String UNSTABLE_OPTION = "-unstable";
 
   public static Integer optionLength(String option) {
-    String opt = option.toLowerCase();
+    String opt = option.toLowerCase(Locale.ROOT);
     if (opt.equals(UNSTABLE_OPTION)) return 1;
     if (opt.equals(EVOLVING_OPTION)) return 1;
     if (opt.equals(STABLE_OPTION)) return 1;
@@ -37,7 +38,7 @@ class StabilityOptions {
 
   public static void validOptions(String[][] options, DocErrorReporter reporter) {
     for (int i = 0; i < options.length; i++) {
-      String opt = options[i][0].toLowerCase();
+      String opt = options[i][0].toLowerCase(Locale.ROOT);
       if (opt.equals(UNSTABLE_OPTION)) {
         RootDocProcessor.stability = UNSTABLE_OPTION;
       } else if (opt.equals(EVOLVING_OPTION)) {

@@ -24,6 +24,7 @@ import java.io.IOException;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 
@@ -477,11 +478,11 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
     setBlockCacheEnabled(blockCacheEnabled);
     setTimeToLive(timeToLive);
     setCompressionType(Compression.Algorithm.
-      valueOf(compression.toUpperCase()));
+      valueOf(compression.toUpperCase(Locale.ROOT)));
     setDataBlockEncoding(DataBlockEncoding.
-      valueOf(dataBlockEncoding.toUpperCase()));
+      valueOf(dataBlockEncoding.toUpperCase(Locale.ROOT)));
     setBloomFilterType(BloomType.
-      valueOf(bloomFilter.toUpperCase()));
+      valueOf(bloomFilter.toUpperCase(Locale.ROOT)));
     setBlocksize(blocksize);
     setScope(scope);
   }
@@ -603,7 +604,7 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
     if (n == null) {
       return Compression.Algorithm.NONE;
     }
-    return Compression.Algorithm.valueOf(n.toUpperCase());
+    return Compression.Algorithm.valueOf(n.toUpperCase(Locale.ROOT));
   }
 
   /** @return compression type being used for the column family for major
@@ -613,7 +614,7 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
     if (n == null) {
       return getCompression();
     }
-    return Compression.Algorithm.valueOf(n.toUpperCase());
+    return Compression.Algorithm.valueOf(n.toUpperCase(Locale.ROOT));
   }
 
   /** @return maximum number of versions */
@@ -708,7 +709,7 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
    * @return this (for chained invocation)
    */
   public HColumnDescriptor setCompressionType(Compression.Algorithm type) {
-    return setValue(COMPRESSION, type.getName().toUpperCase());
+    return setValue(COMPRESSION, type.getName().toUpperCase(Locale.ROOT));
   }
 
   /**
@@ -820,7 +821,7 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
    */
   public HColumnDescriptor setCompactionCompressionType(
       Compression.Algorithm type) {
-    return setValue(COMPRESSION_COMPACT, type.getName().toUpperCase());
+    return setValue(COMPRESSION_COMPACT, type.getName().toUpperCase(Locale.ROOT));
   }
 
   /**
@@ -847,7 +848,7 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
     String value = getValue(KEEP_DELETED_CELLS);
     if (value != null) {
       // toUpperCase for backwards compatibility
-      return KeepDeletedCells.valueOf(value.toUpperCase());
+      return KeepDeletedCells.valueOf(value.toUpperCase(Locale.ROOT));
     }
     return DEFAULT_KEEP_DELETED;
   }
@@ -937,7 +938,7 @@ public class HColumnDescriptor implements WritableComparable<HColumnDescriptor>
     if (n == null) {
       n = DEFAULT_BLOOMFILTER;
     }
-    return BloomType.valueOf(n.toUpperCase());
+    return BloomType.valueOf(n.toUpperCase(Locale.ROOT));
   }
 
   /**

@@ -32,6 +32,7 @@ import org.apache.hadoop.hbase.util.Bytes;
 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 import java.util.regex.Pattern;
 
 /**
@@ -229,7 +230,7 @@ public class ServerName implements Comparable<ServerName>, Serializable {
    */
   static String getServerName(String hostName, int port, long startcode) {
     final StringBuilder name = new StringBuilder(hostName.length() + 1 + 5 + 1 + 13);
-    name.append(hostName.toLowerCase());
+    name.append(hostName.toLowerCase(Locale.ROOT));
     name.append(SERVERNAME_SEPARATOR);
     name.append(port);
     name.append(SERVERNAME_SEPARATOR);

@@ -19,6 +19,7 @@
 package org.apache.hadoop.hbase.filter;
 
 import com.google.protobuf.InvalidProtocolBufferException;
+import java.util.Locale;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
@@ -52,8 +53,8 @@ public class SubstringComparator extends ByteArrayComparable {
    * @param substr the substring
    */
   public SubstringComparator(String substr) {
-    super(Bytes.toBytes(substr.toLowerCase()));
-    this.substr = substr.toLowerCase();
+    super(Bytes.toBytes(substr.toLowerCase(Locale.ROOT)));
+    this.substr = substr.toLowerCase(Locale.ROOT);
   }
 
   @Override
@@ -63,7 +64,7 @@ public class SubstringComparator extends ByteArrayComparable {
 
   @Override
   public int compareTo(byte[] value, int offset, int length) {
-    return Bytes.toString(value, offset, length).toLowerCase().contains(substr) ? 0
+    return Bytes.toString(value, offset, length).toLowerCase(Locale.ROOT).contains(substr) ? 0
         : 1;
   }
 

@@ -27,6 +27,7 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Random;
 import java.util.concurrent.TimeUnit;
@@ -250,7 +251,7 @@ public class AsyncRpcChannel {
       return new SaslClientHandler(realTicket, authMethod, token, serverPrincipal,
           client.fallbackAllowed,
           client.conf.get("hbase.rpc.protection",
-            SaslUtil.QualityOfProtection.AUTHENTICATION.name().toLowerCase()),
+            SaslUtil.QualityOfProtection.AUTHENTICATION.name().toLowerCase(Locale.ROOT)),
           new SaslClientHandler.SaslExceptionHandler() {
             @Override
             public void handle(int retryCount, Random random, Throwable cause) {
@@ -458,7 +459,7 @@ public class AsyncRpcChannel {
         throw new IOException("Can't obtain server Kerberos config key from SecurityInfo");
       }
       this.serverPrincipal = SecurityUtil.getServerPrincipal(client.conf.get(serverKey),
-        address.getAddress().getCanonicalHostName().toLowerCase());
+        address.getAddress().getCanonicalHostName().toLowerCase(Locale.ROOT));
       if (LOG.isDebugEnabled()) {
         LOG.debug("RPC Server Kerberos principal name for service=" + serviceName + " is "
             + serverPrincipal);

@@ -38,6 +38,7 @@ import java.security.PrivilegedExceptionAction;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Random;
@@ -332,7 +333,7 @@ public class RpcClientImpl extends AbstractRpcClient {
             "Can't obtain server Kerberos config key from SecurityInfo");
         }
         serverPrincipal = SecurityUtil.getServerPrincipal(
-          conf.get(serverKey), server.getAddress().getCanonicalHostName().toLowerCase());
+          conf.get(serverKey), server.getAddress().getCanonicalHostName().toLowerCase(Locale.ROOT));
         if (LOG.isDebugEnabled()) {
           LOG.debug("RPC Server Kerberos principal name for service="
               + remoteId.getServiceName() + " is " + serverPrincipal);
@@ -613,7 +614,7 @@ public class RpcClientImpl extends AbstractRpcClient {
         final OutputStream out2) throws IOException {
       saslRpcClient = new HBaseSaslRpcClient(authMethod, token, serverPrincipal, fallbackAllowed,
           conf.get("hbase.rpc.protection",
-            QualityOfProtection.AUTHENTICATION.name().toLowerCase()));
+            QualityOfProtection.AUTHENTICATION.name().toLowerCase(Locale.ROOT)));
       return saslRpcClient.saslConnect(in2, out2);
     }
 

@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.security;
 import org.apache.commons.codec.binary.Base64;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 
+import java.util.Locale;
 import java.util.Map;
 import java.util.TreeMap;
 
@@ -79,13 +80,13 @@ public class SaslUtil {
    */
   public static QualityOfProtection getQop(String stringQop) {
     QualityOfProtection qop = null;
-    if (QualityOfProtection.AUTHENTICATION.name().toLowerCase().equals(stringQop)
+    if (QualityOfProtection.AUTHENTICATION.name().toLowerCase(Locale.ROOT).equals(stringQop)
         || QualityOfProtection.AUTHENTICATION.saslQop.equals(stringQop)) {
       qop = QualityOfProtection.AUTHENTICATION;
-    } else if (QualityOfProtection.INTEGRITY.name().toLowerCase().equals(stringQop)
+    } else if (QualityOfProtection.INTEGRITY.name().toLowerCase(Locale.ROOT).equals(stringQop)
         || QualityOfProtection.INTEGRITY.saslQop.equals(stringQop)) {
       qop = QualityOfProtection.INTEGRITY;
-    } else if (QualityOfProtection.PRIVACY.name().toLowerCase().equals(stringQop)
+    } else if (QualityOfProtection.PRIVACY.name().toLowerCase(Locale.ROOT).equals(stringQop)
         || QualityOfProtection.PRIVACY.saslQop.equals(stringQop)) {
       qop = QualityOfProtection.PRIVACY;
     }

@@ -23,6 +23,7 @@ import java.util.Collection;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
@@ -254,7 +255,7 @@ public class PoolMap<K, V> implements Map<K, V> {
     }
 
     public static String fuzzyNormalize(String name) {
-      return name != null ? name.replaceAll("-", "").trim().toLowerCase() : "";
+      return name != null ? name.replaceAll("-", "").trim().toLowerCase(Locale.ROOT) : "";
     }
 
     public static PoolType fuzzyMatch(String name) {

@@ -26,6 +26,7 @@ import java.net.SocketTimeoutException;
 import java.net.UnknownHostException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -409,7 +410,7 @@ public class MetaTableLocator {
       } else if (cause != null && cause instanceof EOFException) {
         // Catch. Other end disconnected us.
       } else if (cause != null && cause.getMessage() != null &&
-          cause.getMessage().toLowerCase().contains("connection reset")) {
+          cause.getMessage().toLowerCase(Locale.ROOT).contains("connection reset")) {
         // Catch. Connection reset.
       } else {
         throw ioe;

@@ -30,6 +30,7 @@ import java.security.KeyStoreException;
 import java.security.NoSuchAlgorithmException;
 import java.security.UnrecoverableKeyException;
 import java.security.cert.CertificateException;
+import java.util.Locale;
 import java.util.Properties;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -142,7 +143,7 @@ public class KeyStoreKeyProvider implements KeyProvider {
         throw new RuntimeException("KeyProvider scheme should specify KeyStore type");
       }
       // KeyStore expects instance type specifications in uppercase
-      store = KeyStore.getInstance(storeType.toUpperCase());
+      store = KeyStore.getInstance(storeType.toUpperCase(Locale.ROOT));
       processParameters(uri);
       load(uri);
     } catch (URISyntaxException e) {

@@ -20,6 +20,7 @@
 package org.apache.hadoop.hbase.ipc;
 
 import java.util.HashMap;
+import java.util.Locale;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 
@@ -48,7 +49,7 @@ public class MetricsHBaseServerSourceFactoryImpl extends MetricsHBaseServerSourc
       source = new MetricsHBaseServerSourceImpl(
           context,
           METRICS_DESCRIPTION,
-          context.toLowerCase(),
+          context.toLowerCase(Locale.ROOT),
           context + METRICS_JMX_CONTEXT_SUFFIX, wrap);
 
       //Store back in storage

@@ -29,6 +29,7 @@ import org.apache.hadoop.metrics2.MetricsTag;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 
 import java.util.HashMap;
+import java.util.Locale;
 import java.util.Map;
 
 import static org.junit.Assert.*;
@@ -246,6 +247,6 @@ public class MetricsAssertHelperImpl implements MetricsAssertHelper {
   }
 
   private String canonicalizeMetricName(String in) {
-    return in.toLowerCase().replaceAll("[^A-Za-z0-9 ]", "");
+    return in.toLowerCase(Locale.ROOT).replaceAll("[^A-Za-z0-9 ]", "");
   }
 }

@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase;
 
 import java.io.File;
 import java.io.IOException;
+import java.util.Locale;
 import java.util.Map;
 
 import org.apache.commons.lang.StringUtils;
@@ -205,7 +206,7 @@ public class HBaseClusterManager extends Configured implements ClusterManager {
     @Override
     public String getCommand(ServiceType service, Operation op) {
       return String.format("%s/bin/hbase-daemon.sh %s %s %s", hbaseHome, confDir,
-          op.toString().toLowerCase(), service);
+          op.toString().toLowerCase(Locale.ROOT), service);
     }
   }
 
@@ -235,7 +236,7 @@ public class HBaseClusterManager extends Configured implements ClusterManager {
     @Override
     public String getCommand(ServiceType service, Operation op) {
       return String.format("%s/sbin/hadoop-daemon.sh %s %s %s", hadoopHome, confDir,
-          op.toString().toLowerCase(), service);
+          op.toString().toLowerCase(Locale.ROOT), service);
     }
   }
 
@@ -264,7 +265,7 @@ public class HBaseClusterManager extends Configured implements ClusterManager {
 
     @Override
     public String getCommand(ServiceType service, Operation op) {
-      return String.format("%s/bin/zkServer.sh %s", zookeeperHome, op.toString().toLowerCase());
+      return String.format("%s/bin/zkServer.sh %s", zookeeperHome, op.toString().toLowerCase(Locale.ROOT));
     }
 
     @Override

@@ -37,6 +37,7 @@ import javax.xml.ws.http.HTTPException;
 import java.io.IOException;
 import java.net.URI;
 import java.util.HashMap;
+import java.util.Locale;
 import java.util.Map;
 
 /**
@@ -274,8 +275,8 @@ public class RESTApiClusterManager extends Configured implements ClusterManager
         if (role.get("hostRef").get("hostId").getTextValue().equals(hostId) &&
             role.get("type")
                 .getTextValue()
-                .toLowerCase()
-                .equals(roleType.toLowerCase())) {
+                .toLowerCase(Locale.ROOT)
+                .equals(roleType.toLowerCase(Locale.ROOT))) {
           roleValue = role.get(property).getTextValue();
           break;
         }
@@ -328,7 +329,7 @@ public class RESTApiClusterManager extends Configured implements ClusterManager
     // APIs tend to take commands in lowercase, so convert them to save the trouble later.
     @Override
     public String toString() {
-      return name().toLowerCase();
+      return name().toLowerCase(Locale.ROOT);
     }
   }
 
@@ -348,4 +349,4 @@ public class RESTApiClusterManager extends Configured implements ClusterManager
   private enum Service {
     HBASE, HDFS, MAPREDUCE
   }
-}
+}

@@ -19,6 +19,7 @@
 package org.apache.hadoop.hbase;
 
 import java.io.IOException;
+import java.util.Locale;
 import java.util.Set;
 
 import org.apache.commons.cli.CommandLine;
@@ -111,7 +112,7 @@ public class StripeCompactionsPerformanceEvaluation extends AbstractHBaseTool {
     } else {
       minValueSize = maxValueSize = Integer.parseInt(valueSize);
     }
-    String datagen = cmd.getOptionValue(DATAGEN_KEY, "default").toLowerCase();
+    String datagen = cmd.getOptionValue(DATAGEN_KEY, "default").toLowerCase(Locale.ROOT);
     if ("default".equals(datagen)) {
       dataGen = new MultiThreadedAction.DefaultDataGenerator(
           minValueSize, maxValueSize, 1, 1, new byte[][] { COLUMN_FAMILY });

@@ -22,6 +22,7 @@ package org.apache.hadoop.hbase.rest.filter;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.util.HashSet;
+import java.util.Locale;
 import java.util.Set;
 import java.util.StringTokenizer;
 
@@ -65,11 +66,11 @@ public class GzipFilter implements Filter {
     String acceptEncoding = request.getHeader("accept-encoding");
     String contentType = request.getHeader("content-type");
     if ((contentEncoding != null) &&
-        (contentEncoding.toLowerCase().indexOf("gzip") > -1)) {
+        (contentEncoding.toLowerCase(Locale.ROOT).indexOf("gzip") > -1)) {
       request = new GZIPRequestWrapper(request);
     }
     if (((acceptEncoding != null) &&
-        (acceptEncoding.toLowerCase().indexOf("gzip") > -1)) ||
+        (acceptEncoding.toLowerCase(Locale.ROOT).indexOf("gzip") > -1)) ||
         ((contentType != null) && mimeTypes.contains(contentType))) {
       response = new GZIPResponseWrapper(response);
     }

@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.ipc;
 
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.ThreadLocalRandom;
 import java.util.concurrent.atomic.AtomicInteger;
@@ -221,7 +222,7 @@ public abstract class RpcExecutor {
    */
   public void resizeQueues(Configuration conf) {
     String configKey = RpcScheduler.IPC_SERVER_MAX_CALLQUEUE_LENGTH;
-    if (name != null && name.toLowerCase().contains("priority")) {
+    if (name != null && name.toLowerCase(Locale.ROOT).contains("priority")) {
       configKey = RpcScheduler.IPC_SERVER_PRIORITY_MAX_CALLQUEUE_LENGTH;
     }
     currentQueueLimit = conf.getInt(configKey, currentQueueLimit);

@@ -28,6 +28,7 @@ import java.lang.reflect.Method;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.TreeMap;
 import java.util.UUID;
@@ -391,7 +392,7 @@ public class Import {
       filter = instantiateFilter(conf);
       String durabilityStr = conf.get(WAL_DURABILITY);
       if(durabilityStr != null){
-        durability = Durability.valueOf(durabilityStr.toUpperCase());
+        durability = Durability.valueOf(durabilityStr.toUpperCase(Locale.ROOT));
       }
       // TODO: This is kind of ugly doing setup of ZKW just to read the clusterid.
       ZooKeeperWatcher zkw = null;

@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.mapreduce;
 import java.io.IOException;
 import java.util.Collections;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -253,7 +254,7 @@ implements Configurable {
   @Override
   public List<InputSplit> getSplits(JobContext context) throws IOException {
     List<InputSplit> splits = super.getSplits(context);
-    if ((conf.get(SHUFFLE_MAPS) != null) && "true".equals(conf.get(SHUFFLE_MAPS).toLowerCase())) {
+    if ((conf.get(SHUFFLE_MAPS) != null) && "true".equals(conf.get(SHUFFLE_MAPS).toLowerCase(Locale.ROOT))) {
       Collections.shuffle(splits);
     }
     return splits;

@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.regionserver;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 import java.util.UUID;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -61,7 +62,7 @@ implements RowProcessor<S,T> {
 
   @Override
   public String getName() {
-    return this.getClass().getSimpleName().toLowerCase();
+    return this.getClass().getSimpleName().toLowerCase(Locale.ROOT);
   }
 
   @Override

@@ -34,6 +34,7 @@ import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.NavigableMap;
@@ -8433,7 +8434,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
     }
     boolean majorCompact = false;
     if (args.length > 1) {
-      if (!args[1].toLowerCase().startsWith("major")) {
+      if (!args[1].toLowerCase(Locale.ROOT).startsWith("major")) {
         printUsageAndExit("ERROR: Unrecognized option <" + args[1] + ">");
       }
       majorCompact = true;

@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.security;
 import java.io.ByteArrayInputStream;
 import java.io.DataInputStream;
 import java.io.IOException;
+import java.util.Locale;
 
 import javax.security.auth.callback.Callback;
 import javax.security.auth.callback.CallbackHandler;
@@ -50,7 +51,7 @@ public class HBaseSaslRpcServer {
 
   public static void init(Configuration conf) {
     SaslUtil.initSaslProperties(conf.get("hbase.rpc.protection",
-          QualityOfProtection.AUTHENTICATION.name().toLowerCase()));
+          QualityOfProtection.AUTHENTICATION.name().toLowerCase(Locale.ROOT)));
   }
 
   public static <T extends TokenIdentifier> T getIdentifier(String id,

@@ -26,6 +26,7 @@ import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
 import java.util.Arrays;
+import java.util.Locale;
 
 
 /**
@@ -66,7 +67,7 @@ public class CreateSnapshot extends AbstractHBaseTool {
       admin = connection.getAdmin();
       HBaseProtos.SnapshotDescription.Type type = HBaseProtos.SnapshotDescription.Type.FLUSH;
       if (snapshotType != null) {
-        type = HBaseProtos.SnapshotDescription.Type.valueOf(snapshotName.toUpperCase());
+        type = HBaseProtos.SnapshotDescription.Type.valueOf(snapshotName.toUpperCase(Locale.ROOT));
       }
 
       admin.snapshot(snapshotName, TableName.valueOf(tableName), type);

@@ -19,6 +19,7 @@
 package org.apache.hadoop.hbase.util;
 
 import java.io.IOException;
+import java.util.Locale;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
@@ -53,7 +54,7 @@ public class CompressionTest {
   private static final Log LOG = LogFactory.getLog(CompressionTest.class);
 
   public static boolean testCompression(String codec) {
-    codec = codec.toLowerCase();
+    codec = codec.toLowerCase(Locale.ROOT);
 
     Compression.Algorithm a;
 
@@ -109,7 +110,7 @@ public class CompressionTest {
 
     System.err.println(
       "Usage: CompressionTest <path> " +
-      StringUtils.join( Compression.Algorithm.values(), "|").toLowerCase() +
+      StringUtils.join( Compression.Algorithm.values(), "|").toLowerCase(Locale.ROOT) +
       "\n" +
       "For example:\n" +
       "  hbase " + CompressionTest.class + " file:///tmp/testfile gz\n");

@@ -25,6 +25,7 @@ import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.nio.ByteBuffer;
 import java.util.List;
+import java.util.Locale;
 
 import javax.management.JMException;
 import javax.management.MBeanServer;
@@ -87,7 +88,7 @@ public class DirectMemoryUtils {
     long multiplier = 1; //for the byte case.
     for (String s : arguments) {
       if (s.contains("-XX:MaxDirectMemorySize=")) {
-        String memSize = s.toLowerCase()
+        String memSize = s.toLowerCase(Locale.ROOT)
             .replace("-xx:maxdirectmemorysize=", "").trim();
 
         if (memSize.contains("k")) {

@@ -35,6 +35,7 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.concurrent.ArrayBlockingQueue;
 import java.util.concurrent.ConcurrentHashMap;
@@ -120,7 +121,7 @@ public abstract class FSUtils {
    */
   public static void setStoragePolicy(final FileSystem fs, final Configuration conf,
       final Path path, final String policyKey, final String defaultPolicy) {
-    String storagePolicy = conf.get(policyKey, defaultPolicy).toUpperCase();
+    String storagePolicy = conf.get(policyKey, defaultPolicy).toUpperCase(Locale.ROOT);
     if (storagePolicy.equals(defaultPolicy)) {
       if (LOG.isTraceEnabled()) {
         LOG.trace("default policy of " + defaultPolicy + " requested, exiting early.");
@@ -1910,7 +1911,7 @@ public abstract class FSUtils {
       return false;
     }
 
-    if (!regionName.toLowerCase().matches("[0-9a-f]+")) {
+    if (!regionName.toLowerCase(Locale.ROOT).matches("[0-9a-f]+")) {
       return false;
     }
     return true;

@@ -44,6 +44,7 @@ import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
@@ -4070,7 +4071,7 @@ public class HBaseFsck extends Configured implements Closeable {
       errors.progress();
       String encodedName = regionDir.getPath().getName();
       // ignore directories that aren't hexadecimal
-      if (!encodedName.toLowerCase().matches("[0-9a-f]+")) {
+      if (!encodedName.toLowerCase(Locale.ROOT).matches("[0-9a-f]+")) {
         continue;
       }
 

@@ -22,6 +22,7 @@ import java.lang.management.ManagementFactory;
 import java.lang.management.RuntimeMXBean;
 import java.util.Arrays;
 import java.util.HashSet;
+import java.util.Locale;
 import java.util.Map.Entry;
 import java.util.Set;
 
@@ -103,8 +104,8 @@ public abstract class ServerCommandLine extends Configured implements Tool {
 
     nextEnv:
     for (Entry<String, String> entry : System.getenv().entrySet()) {
-      String key = entry.getKey().toLowerCase();
-      String value = entry.getValue().toLowerCase();
+      String key = entry.getKey().toLowerCase(Locale.ROOT);
+      String value = entry.getValue().toLowerCase(Locale.ROOT);
       // exclude variables which may contain skip words
       for(String skipWord : skipWords) {
         if (key.contains(skipWord) || value.contains(skipWord))

@@ -31,6 +31,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Date;
 import java.util.LinkedList;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Queue;
 import java.util.Random;
@@ -312,8 +313,8 @@ public class PerformanceEvaluation extends Configured implements Tool {
   static boolean checkTable(Admin admin, TestOptions opts) throws IOException {
     TableName tableName = TableName.valueOf(opts.tableName);
     boolean needsDelete = false, exists = admin.tableExists(tableName);
-    boolean isReadCmd = opts.cmdName.toLowerCase().contains("read")
-      || opts.cmdName.toLowerCase().contains("scan");
+    boolean isReadCmd = opts.cmdName.toLowerCase(Locale.ROOT).contains("read")
+      || opts.cmdName.toLowerCase(Locale.ROOT).contains("scan");
     if (!exists && isReadCmd) {
       throw new IllegalStateException(
         "Must specify an existing table for read commands. Run a write command first.");

@@ -24,6 +24,7 @@ import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.net.ServerSocket;
 import java.nio.channels.ServerSocketChannel;
+import java.util.Locale;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -123,7 +124,7 @@ public class TestIPv6NIOServerSocketChannel {
       //java.net.SocketException: Address family not supported by protocol family
       //or java.net.SocketException: Protocol family not supported
       Assert.assertFalse(ex.getClass().isInstance(BindException.class));
-      Assert.assertTrue(ex.getMessage().toLowerCase().contains("protocol family"));
+      Assert.assertTrue(ex.getMessage().toLowerCase(Locale.ROOT).contains("protocol family"));
       LOG.info("Received expected exception:");
       LOG.info(ex);
 

@@ -27,6 +27,7 @@ import java.lang.management.ThreadMXBean;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
 import java.util.Date;
+import java.util.Locale;
 import java.util.Map;
 
 import org.junit.runner.notification.Failure;
@@ -93,7 +94,7 @@ public class TimedOutTestsListener extends RunListener {
           thread.getPriority(),
           thread.getId(),
           Thread.State.WAITING.equals(thread.getState()) ?
-              "in Object.wait()" : thread.getState().name().toLowerCase(),
+              "in Object.wait()" : thread.getState().name().toLowerCase(Locale.ROOT),
           Thread.State.WAITING.equals(thread.getState()) ?
               "WAITING (on object monitor)" : thread.getState()));
       for (StackTraceElement stackTraceElement : e.getValue()) {

@@ -47,6 +47,7 @@ import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.NavigableMap;
 
@@ -230,8 +231,8 @@ public abstract class MultiTableInputFormatTestBase {
   private void testScan(String start, String stop, String last)
       throws IOException, InterruptedException, ClassNotFoundException {
     String jobName =
-        "Scan" + (start != null ? start.toUpperCase() : "Empty") + "To" +
-            (stop != null ? stop.toUpperCase() : "Empty");
+        "Scan" + (start != null ? start.toUpperCase(Locale.ROOT) : "Empty") + "To" +
+            (stop != null ? stop.toUpperCase(Locale.ROOT) : "Empty");
     LOG.info("Before map/reduce startup - job " + jobName);
     Configuration c = new Configuration(TEST_UTIL.getConfiguration());
 

@@ -24,6 +24,7 @@ import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
 import java.io.IOException;
+import java.util.Locale;
 import java.util.TreeMap;
 
 import org.apache.hadoop.conf.Configuration;
@@ -385,7 +386,7 @@ public class TestLoadIncrementalHFiles {
     // set real family name to upper case in purpose to simulate the case that
     // family name in HFiles is invalid
     HColumnDescriptor family =
-        new HColumnDescriptor(Bytes.toBytes(new String(FAMILY).toUpperCase()));
+        new HColumnDescriptor(Bytes.toBytes(new String(FAMILY).toUpperCase(Locale.ROOT)));
     htd.addFamily(family);
 
     try {

@@ -24,6 +24,7 @@ import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.NavigableMap;
 
@@ -206,8 +207,8 @@ public class TestMultiTableInputFormat {
   private void testScan(String start, String stop, String last)
       throws IOException, InterruptedException, ClassNotFoundException {
     String jobName =
-        "Scan" + (start != null ? start.toUpperCase() : "Empty") + "To" +
-            (stop != null ? stop.toUpperCase() : "Empty");
+        "Scan" + (start != null ? start.toUpperCase(Locale.ROOT) : "Empty") + "To" +
+            (stop != null ? stop.toUpperCase(Locale.ROOT) : "Empty");
     LOG.info("Before map/reduce startup - job " + jobName);
     Configuration c = new Configuration(TEST_UTIL.getConfiguration());
 

@@ -23,6 +23,7 @@ import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.NavigableMap;
 
@@ -177,8 +178,8 @@ public abstract class TestTableInputFormatScanBase {
    */
   protected void testScanFromConfiguration(String start, String stop, String last)
       throws IOException, InterruptedException, ClassNotFoundException {
-    String jobName = "ScanFromConfig" + (start != null ? start.toUpperCase() : "Empty") +
-      "To" + (stop != null ? stop.toUpperCase() : "Empty");
+    String jobName = "ScanFromConfig" + (start != null ? start.toUpperCase(Locale.ROOT) : "Empty") +
+      "To" + (stop != null ? stop.toUpperCase(Locale.ROOT) : "Empty");
     Configuration c = new Configuration(TEST_UTIL.getConfiguration());
     c.set(TableInputFormat.INPUT_TABLE, Bytes.toString(TABLE_NAME));
     c.set(TableInputFormat.SCAN_COLUMN_FAMILY, Bytes.toString(INPUT_FAMILY));
@@ -214,8 +215,8 @@ public abstract class TestTableInputFormatScanBase {
    */
   protected void testScan(String start, String stop, String last)
       throws IOException, InterruptedException, ClassNotFoundException {
-    String jobName = "Scan" + (start != null ? start.toUpperCase() : "Empty") +
-      "To" + (stop != null ? stop.toUpperCase() : "Empty");
+    String jobName = "Scan" + (start != null ? start.toUpperCase(Locale.ROOT) : "Empty") +
+      "To" + (stop != null ? stop.toUpperCase(Locale.ROOT) : "Empty");
     LOG.info("Before map/reduce startup - job " + jobName);
     Configuration c = new Configuration(TEST_UTIL.getConfiguration());
     Scan scan = new Scan();

@@ -24,6 +24,7 @@ import java.text.DecimalFormat;
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Locale;
 
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
@@ -142,7 +143,7 @@ public class DataBlockEncodingTool {
       String s = super.toString();
       StringBuilder sb = new StringBuilder();
       sb.append(s.charAt(0));
-      sb.append(s.substring(1).toLowerCase());
+      sb.append(s.substring(1).toLowerCase(Locale.ROOT));
       return sb.toString();
     }
   }
@@ -372,7 +373,7 @@ public class DataBlockEncodingTool {
   private void benchmarkDefaultCompression(int totalSize, byte[] rawBuffer)
       throws IOException {
     benchmarkAlgorithm(compressionAlgorithm,
-        compressionAlgorithmName.toUpperCase(), rawBuffer, 0, totalSize);
+        compressionAlgorithmName.toUpperCase(Locale.ROOT), rawBuffer, 0, totalSize);
   }
 
   /**
@@ -526,7 +527,7 @@ public class DataBlockEncodingTool {
    * @throws IOException
    */
   public void displayStatistics() throws IOException {
-    final String comprAlgo = compressionAlgorithmName.toUpperCase();
+    final String comprAlgo = compressionAlgorithmName.toUpperCase(Locale.ROOT);
     long rawBytes = totalKeyLength + totalPrefixLength + totalValueLength;
 
     System.out.println("Raw data size:");
@@ -694,7 +695,7 @@ public class DataBlockEncodingTool {
     String compressionName = DEFAULT_COMPRESSION.getName();
     if (cmd.hasOption(OPT_ENCODING_ALGORITHM)) {
       compressionName =
-          cmd.getOptionValue(OPT_ENCODING_ALGORITHM).toLowerCase();
+          cmd.getOptionValue(OPT_ENCODING_ALGORITHM).toLowerCase(Locale.ROOT);
     }
     boolean doBenchmark = cmd.hasOption(OPT_MEASURE_THROUGHPUT);
     boolean doVerify = !cmd.hasOption(OPT_OMIT_CORRECTNESS_TEST);

@@ -24,6 +24,7 @@ import java.security.SecureRandom;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
+import java.util.Locale;
 import java.util.Properties;
 import java.util.Random;
 import java.util.concurrent.atomic.AtomicReference;
@@ -124,7 +125,7 @@ public class LoadTestTool extends AbstractHBaseTool {
   public static final String OPT_DEFERRED_LOG_FLUSH_USAGE = "Enable deferred log flush.";
 
   public static final String OPT_DATA_BLOCK_ENCODING =
-      HColumnDescriptor.DATA_BLOCK_ENCODING.toLowerCase();
+      HColumnDescriptor.DATA_BLOCK_ENCODING.toLowerCase(Locale.ROOT);
 
   public static final String OPT_INMEMORY = "in_memory";
   public static final String OPT_USAGE_IN_MEMORY = "Tries to keep the HFiles of the CF " +

@@ -19,6 +19,7 @@
 
 package org.apache.hadoop.hbase.thrift;
 
+import java.util.Locale;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 
@@ -79,7 +80,7 @@ public class HThreadedSelectorServerArgs extends TThreadedSelectorServer.Args {
     int acceptQueueSizePerThread = conf.getInt(
         ACCEPT_QUEUE_SIZE_PER_THREAD_CONF_KEY, getAcceptQueueSizePerThread());
     AcceptPolicy acceptPolicy = AcceptPolicy.valueOf(conf.get(
-        ACCEPT_POLICY_CONF_KEY, getAcceptPolicy().toString()).toUpperCase());
+        ACCEPT_POLICY_CONF_KEY, getAcceptPolicy().toString()).toUpperCase(Locale.ROOT));
 
     super.selectorThreads(selectorThreads)
         .workerThreads(workerThreads)

@@ -23,6 +23,7 @@ import static org.apache.hadoop.hbase.util.Bytes.getBytes;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 import java.util.TreeMap;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
@@ -59,7 +60,7 @@ public class ThriftUtilities {
   static public HColumnDescriptor colDescFromThrift(ColumnDescriptor in)
       throws IllegalArgument {
     Compression.Algorithm comp =
-        Compression.getCompressionAlgorithmByName(in.compression.toLowerCase());
+        Compression.getCompressionAlgorithmByName(in.compression.toLowerCase(Locale.ROOT));
     BloomType bt =
         BloomType.valueOf(in.bloomFilterType);
 