HADOOP-11602. Fix toUpperCase/toLowerCase to use Locale.ENGLISH. (ozawa)

Tsuyoshi Ozawa 2015-02-19 12:46:46 +09:00
parent 18fb421fab
commit 946456c6d8
98 changed files with 331 additions and 191 deletions
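
Why the change matters (illustrative aside, not part of the patch): String.toLowerCase() and String.toUpperCase() without an explicit locale use the JVM's default locale. Under a Turkish default locale, 'I' lower-cases to the dotless 'ı' and 'i' upper-cases to the dotted 'İ', so comparisons of ASCII keywords, option names, and config values can silently fail. Pinning Locale.ENGLISH, as every hunk below does, makes the conversion locale-independent for this kind of input. The following minimal, self-contained sketch (hypothetical class name, not from the Hadoop tree) reproduces the pitfall:

import java.util.Locale;

public class LocaleCasingDemo {
  public static void main(String[] args) {
    // Simulate a JVM running with a Turkish default locale.
    Locale.setDefault(new Locale("tr", "TR"));

    // Default-locale conversion: 'I' maps to dotless 'ı'.
    System.out.println("TITLE".toLowerCase());                // prints "tıtle"
    // Locale-pinned conversion, the pattern applied by this commit.
    System.out.println("TITLE".toLowerCase(Locale.ENGLISH));  // prints "title"

    // Keyword matching (e.g. the "binary" processor name further below)
    // breaks without the pinned locale and works with it.
    System.out.println("BINARY".toLowerCase().equals("binary"));               // false
    System.out.println("BINARY".toLowerCase(Locale.ENGLISH).equals("binary")); // true
  }
}

For ASCII keywords the result is identical for Locale.ENGLISH, Locale.US, and Locale.ROOT; the commit standardizes on Locale.ENGLISH, which is why the existing Locale.US calls are converted as well.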

View File

@ -21,6 +21,7 @@ import com.sun.javadoc.DocErrorReporter;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
class StabilityOptions {
public static final String STABLE_OPTION = "-stable";
@ -28,7 +29,7 @@ class StabilityOptions {
public static final String UNSTABLE_OPTION = "-unstable";
public static Integer optionLength(String option) {
String opt = option.toLowerCase();
String opt = option.toLowerCase(Locale.ENGLISH);
if (opt.equals(UNSTABLE_OPTION)) return 1;
if (opt.equals(EVOLVING_OPTION)) return 1;
if (opt.equals(STABLE_OPTION)) return 1;
@ -38,7 +39,7 @@ class StabilityOptions {
public static void validOptions(String[][] options,
DocErrorReporter reporter) {
for (int i = 0; i < options.length; i++) {
String opt = options[i][0].toLowerCase();
String opt = options[i][0].toLowerCase(Locale.ENGLISH);
if (opt.equals(UNSTABLE_OPTION)) {
RootDocProcessor.stability = UNSTABLE_OPTION;
} else if (opt.equals(EVOLVING_OPTION)) {

View File

@ -14,6 +14,7 @@
package org.apache.hadoop.security.authentication.server;
import java.io.IOException;
import java.util.Locale;
import java.util.Properties;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
@ -68,7 +69,8 @@ public abstract class AltKerberosAuthenticationHandler
NON_BROWSER_USER_AGENTS, NON_BROWSER_USER_AGENTS_DEFAULT)
.split("\\W*,\\W*");
for (int i = 0; i < nonBrowserUserAgents.length; i++) {
nonBrowserUserAgents[i] = nonBrowserUserAgents[i].toLowerCase();
nonBrowserUserAgents[i] =
nonBrowserUserAgents[i].toLowerCase(Locale.ENGLISH);
}
}
@ -120,7 +122,7 @@ public abstract class AltKerberosAuthenticationHandler
if (userAgent == null) {
return false;
}
userAgent = userAgent.toLowerCase();
userAgent = userAgent.toLowerCase(Locale.ENGLISH);
boolean isBrowser = true;
for (String nonBrowserUserAgent : nonBrowserUserAgents) {
if (userAgent.contains(nonBrowserUserAgent)) {

View File

@ -21,6 +21,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.regex.Pattern;
import org.apache.directory.server.kerberos.shared.keytab.Keytab;
@ -58,24 +59,25 @@ public class TestKerberosUtil {
// send null hostname
Assert.assertEquals("When no hostname is sent",
service + "/" + localHostname.toLowerCase(),
service + "/" + localHostname.toLowerCase(Locale.ENGLISH),
KerberosUtil.getServicePrincipal(service, null));
// send empty hostname
Assert.assertEquals("When empty hostname is sent",
service + "/" + localHostname.toLowerCase(),
service + "/" + localHostname.toLowerCase(Locale.ENGLISH),
KerberosUtil.getServicePrincipal(service, ""));
// send 0.0.0.0 hostname
Assert.assertEquals("When 0.0.0.0 hostname is sent",
service + "/" + localHostname.toLowerCase(),
service + "/" + localHostname.toLowerCase(Locale.ENGLISH),
KerberosUtil.getServicePrincipal(service, "0.0.0.0"));
// send uppercase hostname
Assert.assertEquals("When uppercase hostname is sent",
service + "/" + testHost.toLowerCase(),
service + "/" + testHost.toLowerCase(Locale.ENGLISH),
KerberosUtil.getServicePrincipal(service, testHost));
// send lowercase hostname
Assert.assertEquals("When lowercase hostname is sent",
service + "/" + testHost.toLowerCase(),
KerberosUtil.getServicePrincipal(service, testHost.toLowerCase()));
service + "/" + testHost.toLowerCase(Locale.ENGLISH),
KerberosUtil.getServicePrincipal(
service, testHost.toLowerCase(Locale.ENGLISH)));
}
@Test

View File

@ -405,6 +405,8 @@ Trunk (Unreleased)
HADOOP-11585. Fix formatting in Tracing.md (Masatake Iwasaki via aw)
HADOOP-11602. Fix toUpperCase/toLowerCase to use Locale.ENGLISH. (ozawa)
OPTIMIZATIONS
HADOOP-7761. Improve the performance of raw comparisons. (todd)

View File

@ -46,6 +46,7 @@ import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
@ -1451,7 +1452,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
return defaultValue;
}
valueString = valueString.toLowerCase();
valueString = valueString.toLowerCase(Locale.ENGLISH);
if ("true".equals(valueString))
return true;

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.crypto;
import java.util.Locale;
import org.apache.hadoop.classification.InterfaceAudience;
/**
@ -97,7 +98,7 @@ public enum CipherSuite {
String[] parts = name.split("/");
StringBuilder suffix = new StringBuilder();
for (String part : parts) {
suffix.append(".").append(part.toLowerCase());
suffix.append(".").append(part.toLowerCase(Locale.ENGLISH));
}
return suffix.toString();

View File

@ -53,6 +53,7 @@ import java.util.Date;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
@ -422,7 +423,7 @@ public class JavaKeyStoreProvider extends KeyProvider {
@Override
public KeyVersion createKey(String name, byte[] material,
Options options) throws IOException {
Preconditions.checkArgument(name.equals(name.toLowerCase()),
Preconditions.checkArgument(name.equals(name.toLowerCase(Locale.ENGLISH)),
"Uppercase key names are unsupported: %s", name);
writeLock.lock();
try {

View File

@ -33,6 +33,7 @@ import java.util.IdentityHashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.ServiceLoader;
@ -2795,8 +2796,10 @@ public abstract class FileSystem extends Configured implements Closeable {
}
Key(URI uri, Configuration conf, long unique) throws IOException {
scheme = uri.getScheme()==null?"":uri.getScheme().toLowerCase();
authority = uri.getAuthority()==null?"":uri.getAuthority().toLowerCase();
scheme = uri.getScheme() == null ?
"" : uri.getScheme().toLowerCase(Locale.ENGLISH);
authority = uri.getAuthority() == null ?
"" : uri.getAuthority().toLowerCase(Locale.ENGLISH);
this.unique = unique;
this.ugi = UserGroupInformation.getCurrentUser();

View File

@ -20,6 +20,7 @@ package org.apache.hadoop.fs.permission;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Locale;
import com.google.common.base.Objects;
@ -106,7 +107,7 @@ public class AclEntry {
sb.append("default:");
}
if (type != null) {
sb.append(type.toString().toLowerCase());
sb.append(type.toString().toLowerCase(Locale.ENGLISH));
}
sb.append(':');
if (name != null) {
@ -263,7 +264,8 @@ public class AclEntry {
AclEntryType aclType = null;
try {
aclType = Enum.valueOf(AclEntryType.class, split[index].toUpperCase());
aclType = Enum.valueOf(
AclEntryType.class, split[index].toUpperCase(Locale.ENGLISH));
builder.setType(aclType);
index++;
} catch (IllegalArgumentException iae) {

View File

@ -19,6 +19,7 @@ package org.apache.hadoop.fs.shell.find;
import java.io.IOException;
import java.util.Deque;
import java.util.Locale;
import org.apache.hadoop.fs.GlobPattern;
import org.apache.hadoop.fs.shell.PathData;
@ -73,7 +74,7 @@ final class Name extends BaseExpression {
public void prepare() throws IOException {
String argPattern = getArgument(1);
if (!caseSensitive) {
argPattern = argPattern.toLowerCase();
argPattern = argPattern.toLowerCase(Locale.ENGLISH);
}
globPattern = new GlobPattern(argPattern);
}
@ -82,7 +83,7 @@ final class Name extends BaseExpression {
public Result apply(PathData item, int depth) throws IOException {
String name = getPath(item).getName();
if (!caseSensitive) {
name = name.toLowerCase();
name = name.toLowerCase(Locale.ENGLISH);
}
if (globPattern.matches(name)) {
return Result.PASS;

View File

@ -65,10 +65,10 @@ public class CompressionCodecFactory {
codecsByClassName.put(codec.getClass().getCanonicalName(), codec);
String codecName = codec.getClass().getSimpleName();
codecsByName.put(codecName.toLowerCase(), codec);
codecsByName.put(codecName.toLowerCase(Locale.ENGLISH), codec);
if (codecName.endsWith("Codec")) {
codecName = codecName.substring(0, codecName.length() - "Codec".length());
codecsByName.put(codecName.toLowerCase(), codec);
codecsByName.put(codecName.toLowerCase(Locale.ENGLISH), codec);
}
}
@ -240,7 +240,7 @@ public class CompressionCodecFactory {
CompressionCodec codec = getCodecByClassName(codecName);
if (codec == null) {
// trying to get the codec by name in case the name was specified instead a class
codec = codecsByName.get(codecName.toLowerCase());
codec = codecsByName.get(codecName.toLowerCase(Locale.ENGLISH));
}
return codec;
}

View File

@ -85,12 +85,13 @@ class MetricsConfig extends SubsetConfiguration {
private ClassLoader pluginLoader;
MetricsConfig(Configuration c, String prefix) {
super(c, prefix.toLowerCase(Locale.US), ".");
super(c, prefix.toLowerCase(Locale.ENGLISH), ".");
}
static MetricsConfig create(String prefix) {
return loadFirst(prefix, "hadoop-metrics2-"+ prefix.toLowerCase(Locale.US)
+".properties", DEFAULT_FILE_NAME);
return loadFirst(prefix, "hadoop-metrics2" + "-"
+ prefix.toLowerCase(Locale.ENGLISH)
+".properties", DEFAULT_FILE_NAME);
}
static MetricsConfig create(String prefix, String... fileNames) {

View File

@ -617,6 +617,6 @@ public class MetricsSystemImpl extends MetricsSystem implements MetricsSource {
String m = System.getProperty(MS_INIT_MODE_KEY);
String m2 = m == null ? System.getenv(MS_INIT_MODE_KEY) : m;
return InitMode.valueOf((m2 == null ? InitMode.NORMAL.name() : m2)
.toUpperCase(Locale.US));
.toUpperCase(Locale.ENGLISH));
}
}

View File

@ -182,7 +182,8 @@ public class SecurityUtil {
if (fqdn == null || fqdn.isEmpty() || fqdn.equals("0.0.0.0")) {
fqdn = getLocalHostName();
}
return components[0] + "/" + fqdn.toLowerCase(Locale.US) + "@" + components[2];
return components[0] + "/" + fqdn.toLowerCase(Locale.ENGLISH) + "@"
+ components[2];
}
static String getLocalHostName() throws UnknownHostException {
@ -379,7 +380,7 @@ public class SecurityUtil {
}
host = addr.getAddress().getHostAddress();
} else {
host = addr.getHostName().toLowerCase();
host = addr.getHostName().toLowerCase(Locale.ENGLISH);
}
return new Text(host + ":" + addr.getPort());
}

View File

@ -19,6 +19,7 @@ package org.apache.hadoop.security;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Locale;
import java.util.Map;
import java.util.TreeMap;
@ -138,7 +139,8 @@ public class WhitelistBasedResolver extends SaslPropertiesResolver {
QualityOfProtection.PRIVACY.toString());
for (int i=0; i < qop.length; i++) {
qop[i] = QualityOfProtection.valueOf(qop[i].toUpperCase()).getSaslQop();
qop[i] = QualityOfProtection.valueOf(
qop[i].toUpperCase(Locale.ENGLISH)).getSaslQop();
}
saslProps.put(Sasl.QOP, StringUtils.join(",", qop));

View File

@ -33,6 +33,7 @@ import java.io.InputStream;
import java.security.GeneralSecurityException;
import java.security.KeyStore;
import java.text.MessageFormat;
import java.util.Locale;
/**
* {@link KeyStoresFactory} implementation that reads the certificates from
@ -94,7 +95,8 @@ public class FileBasedKeyStoresFactory implements KeyStoresFactory {
@VisibleForTesting
public static String resolvePropertyName(SSLFactory.Mode mode,
String template) {
return MessageFormat.format(template, mode.toString().toLowerCase());
return MessageFormat.format(
template, mode.toString().toLowerCase(Locale.ENGLISH));
}
/**

View File

@ -33,6 +33,7 @@ import javax.net.ssl.SSLSocketFactory;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.security.GeneralSecurityException;
import java.util.Locale;
/**
* Factory that creates SSLEngine and SSLSocketFactory instances using
@ -138,7 +139,7 @@ public class SSLFactory implements ConnectionConfigurator {
private HostnameVerifier getHostnameVerifier(Configuration conf)
throws GeneralSecurityException, IOException {
return getHostnameVerifier(conf.get(SSL_HOSTNAME_VERIFIER_KEY, "DEFAULT").
trim().toUpperCase());
trim().toUpperCase(Locale.ENGLISH));
}
public static HostnameVerifier getHostnameVerifier(String verifier)

View File

@ -41,6 +41,7 @@ import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.TreeSet;
@ -365,7 +366,7 @@ public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier {
buf.append('<');
for (int i = 0; i < hosts.length; i++) {
String h = hosts[i];
h = h != null ? h.trim().toLowerCase() : "";
h = h != null ? h.trim().toLowerCase(Locale.ENGLISH) : "";
hosts[i] = h;
if (i > 0) {
buf.append('/');
@ -406,7 +407,7 @@ public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier {
out:
for (Iterator<String> it = names.iterator(); it.hasNext();) {
// Don't trim the CN, though!
final String cn = it.next().toLowerCase();
final String cn = it.next().toLowerCase(Locale.ENGLISH);
// Store CN in StringBuffer in case we need to report an error.
buf.append(" <");
buf.append(cn);
@ -424,7 +425,8 @@ public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier {
acceptableCountryWildcard(cn);
for (int i = 0; i < hosts.length; i++) {
final String hostName = hosts[i].trim().toLowerCase();
final String hostName =
hosts[i].trim().toLowerCase(Locale.ENGLISH);
if (doWildcard) {
match = hostName.endsWith(cn.substring(1));
if (match && strictWithSubDomains) {
@ -479,7 +481,7 @@ public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier {
}
public static boolean isLocalhost(String host) {
host = host != null ? host.trim().toLowerCase() : "";
host = host != null ? host.trim().toLowerCase(Locale.ENGLISH) : "";
if (host.startsWith("::1")) {
int x = host.lastIndexOf('%');
if (x >= 0) {

View File

@ -23,6 +23,7 @@ import java.text.MessageFormat;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
@ -169,7 +170,7 @@ public abstract class DelegationTokenAuthenticationHandler
boolean requestContinues = true;
String op = ServletUtils.getParameter(request,
KerberosDelegationTokenAuthenticator.OP_PARAM);
op = (op != null) ? op.toUpperCase() : null;
op = (op != null) ? op.toUpperCase(Locale.ENGLISH) : null;
if (DELEGATION_TOKEN_OPS.contains(op) &&
!request.getMethod().equals("OPTIONS")) {
KerberosDelegationTokenAuthenticator.DelegationTokenOperation dtOp =

View File

@ -37,6 +37,7 @@ import java.net.InetSocketAddress;
import java.net.URL;
import java.net.URLEncoder;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
/**
@ -286,8 +287,8 @@ public abstract class DelegationTokenAuthenticator implements Authenticator {
HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
if (hasResponse) {
String contentType = conn.getHeaderField(CONTENT_TYPE);
contentType = (contentType != null) ? contentType.toLowerCase()
: null;
contentType = (contentType != null) ?
contentType.toLowerCase(Locale.ENGLISH) : null;
if (contentType != null &&
contentType.contains(APPLICATION_JSON_MIME)) {
try {

View File

@ -901,7 +901,7 @@ public class StringUtils {
*/
public static String camelize(String s) {
StringBuilder sb = new StringBuilder();
String[] words = split(s.toLowerCase(Locale.US), ESCAPE_CHAR, '_');
String[] words = split(s.toLowerCase(Locale.ENGLISH), ESCAPE_CHAR, '_');
for (String word : words)
sb.append(org.apache.commons.lang.StringUtils.capitalize(word));

View File

@ -43,6 +43,7 @@ import java.net.SocketTimeoutException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Random;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CountDownLatch;
@ -1296,7 +1297,7 @@ public class TestIPC {
StringBuilder hexString = new StringBuilder();
for (String line : hexdump.toUpperCase().split("\n")) {
for (String line : hexdump.toUpperCase(Locale.ENGLISH).split("\n")) {
hexString.append(line.substring(0, LAST_HEX_COL).replace(" ", ""));
}
return StringUtils.hexStringToByte(hexString.toString());

View File

@ -41,6 +41,7 @@ import java.security.Security;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;
@ -181,7 +182,7 @@ public class TestSaslRPC {
StringBuilder sb = new StringBuilder();
int i = 0;
for (QualityOfProtection qop:qops){
sb.append(qop.name().toLowerCase());
sb.append(qop.name().toLowerCase(Locale.ENGLISH));
if (++i < qops.length){
sb.append(",");
}

View File

@ -103,13 +103,13 @@ public class TestSecurityUtil {
String realm = "@REALM";
String principalInConf = service + SecurityUtil.HOSTNAME_PATTERN + realm;
String hostname = "FooHost";
String principal = service + hostname.toLowerCase() + realm;
String principal = service + hostname.toLowerCase(Locale.ENGLISH) + realm;
verify(principalInConf, hostname, principal);
}
@Test
public void testLocalHostNameForNullOrWild() throws Exception {
String local = SecurityUtil.getLocalHostName().toLowerCase(Locale.US);
String local = SecurityUtil.getLocalHostName().toLowerCase(Locale.ENGLISH);
assertEquals("hdfs/" + local + "@REALM",
SecurityUtil.getServerPrincipal("hdfs/_HOST@REALM", (String)null));
assertEquals("hdfs/" + local + "@REALM",
@ -260,7 +260,7 @@ public class TestSecurityUtil {
//LOG.info("address:"+addr+" host:"+host+" ip:"+ip+" port:"+port);
SecurityUtil.setTokenServiceUseIp(useIp);
String serviceHost = useIp ? ip : host.toLowerCase();
String serviceHost = useIp ? ip : host.toLowerCase(Locale.ENGLISH);
Token<?> token = new Token<TokenIdentifier>();
Text service = new Text(serviceHost+":"+port);

View File

@ -41,6 +41,7 @@ import java.security.PrivilegedExceptionAction;
import java.util.Collection;
import java.util.ConcurrentModificationException;
import java.util.LinkedHashSet;
import java.util.Locale;
import java.util.Set;
import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_USER_GROUP_METRICS_PERCENTILES_INTERVALS;
@ -213,7 +214,7 @@ public class TestUserGroupInformation {
userName = userName.substring(sp + 1);
}
// user names are case insensitive on Windows. Make consistent
userName = userName.toLowerCase();
userName = userName.toLowerCase(Locale.ENGLISH);
}
// get the groups
pp = Runtime.getRuntime().exec(Shell.WINDOWS ?
@ -233,7 +234,7 @@ public class TestUserGroupInformation {
String loginUserName = login.getShortUserName();
if(Shell.WINDOWS) {
// user names are case insensitive on Windows. Make consistent
loginUserName = loginUserName.toLowerCase();
loginUserName = loginUserName.toLowerCase(Locale.ENGLISH);
}
assertEquals(userName, loginUserName);

View File

@ -27,6 +27,7 @@ import java.lang.management.ThreadMXBean;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import java.util.Map;
import org.junit.runner.notification.Failure;
@ -93,7 +94,8 @@ public class TimedOutTestsListener extends RunListener {
thread.getPriority(),
thread.getId(),
Thread.State.WAITING.equals(thread.getState()) ?
"in Object.wait()" : thread.getState().name().toLowerCase(),
"in Object.wait()" :
thread.getState().name().toLowerCase(Locale.ENGLISH),
Thread.State.WAITING.equals(thread.getState()) ?
"WAITING (on object monitor)" : thread.getState()));
for (StackTraceElement stackTraceElement : e.getValue()) {

View File

@ -27,6 +27,7 @@ import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Locale;
import org.apache.commons.io.FileUtils;
import org.apache.commons.logging.Log;
@ -382,8 +383,10 @@ public class TestWinUtils {
private void assertOwners(File file, String expectedUser,
String expectedGroup) throws IOException {
String [] args = lsF(file).trim().split("[\\|]");
assertEquals(expectedUser.toLowerCase(), args[2].toLowerCase());
assertEquals(expectedGroup.toLowerCase(), args[3].toLowerCase());
assertEquals(expectedUser.toLowerCase(Locale.ENGLISH),
args[2].toLowerCase(Locale.ENGLISH));
assertEquals(expectedGroup.toLowerCase(Locale.ENGLISH),
args[3].toLowerCase(Locale.ENGLISH));
}
@Test (timeout = 30000)

View File

@ -20,6 +20,7 @@ package org.apache.hadoop.nfs;
import java.net.InetAddress;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.regex.Pattern;
import org.apache.commons.logging.Log;
@ -359,10 +360,10 @@ public class NfsExports {
AccessPrivilege privilege = AccessPrivilege.READ_ONLY;
switch (parts.length) {
case 1:
host = parts[0].toLowerCase().trim();
host = parts[0].toLowerCase(Locale.ENGLISH).trim();
break;
case 2:
host = parts[0].toLowerCase().trim();
host = parts[0].toLowerCase(Locale.ENGLISH).trim();
String option = parts[1].trim();
if ("rw".equalsIgnoreCase(option)) {
privilege = AccessPrivilege.READ_WRITE;

View File

@ -32,6 +32,7 @@ import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.HashSet;
import java.util.Locale;
import java.util.Set;
/**
@ -82,7 +83,8 @@ public class CheckUploadContentTypeFilter implements Filter {
String method = httpReq.getMethod();
if (method.equals("PUT") || method.equals("POST")) {
String op = httpReq.getParameter(HttpFSFileSystem.OP_PARAM);
if (op != null && UPLOAD_OPERATIONS.contains(op.toUpperCase())) {
if (op != null &&
UPLOAD_OPERATIONS.contains(op.toUpperCase(Locale.ENGLISH))) {
if ("true".equalsIgnoreCase(httpReq.getParameter(HttpFSParametersProvider.DataParam.NAME))) {
String contentType = httpReq.getContentType();
contentTypeOK =

View File

@ -43,6 +43,7 @@ import java.io.OutputStream;
import java.util.EnumSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
@ -527,7 +528,8 @@ public class FSOperations {
@Override
public JSONObject execute(FileSystem fs) throws IOException {
boolean deleted = fs.delete(path, recursive);
return toJSON(HttpFSFileSystem.DELETE_JSON.toLowerCase(), deleted);
return toJSON(
HttpFSFileSystem.DELETE_JSON.toLowerCase(Locale.ENGLISH), deleted);
}
}

View File

@ -33,6 +33,7 @@ import org.apache.hadoop.lib.wsrs.StringParam;
import javax.ws.rs.ext.Provider;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.regex.Pattern;
@ -167,7 +168,8 @@ public class HttpFSParametersProvider extends ParametersProvider {
*/
public OperationParam(String operation) {
super(NAME, HttpFSFileSystem.Operation.class,
HttpFSFileSystem.Operation.valueOf(operation.toUpperCase()));
HttpFSFileSystem.Operation.valueOf(
operation.toUpperCase(Locale.ENGLISH)));
}
}

View File

@ -36,6 +36,7 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
@ -202,7 +203,7 @@ public class Server {
* @param config server configuration.
*/
public Server(String name, String homeDir, String configDir, String logDir, String tempDir, Configuration config) {
this.name = Check.notEmpty(name, "name").trim().toLowerCase();
this.name = Check.notEmpty(name, "name").trim().toLowerCase(Locale.ENGLISH);
this.homeDir = Check.notEmpty(homeDir, "homeDir");
this.configDir = Check.notEmpty(configDir, "configDir");
this.logDir = Check.notEmpty(logDir, "logDir");

View File

@ -43,6 +43,7 @@ import java.net.URI;
import java.security.PrivilegedExceptionAction;
import java.util.Collection;
import java.util.HashSet;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
@ -254,7 +255,7 @@ public class FileSystemAccessService extends BaseService implements FileSystemAc
private Set<String> toLowerCase(Collection<String> collection) {
Set<String> set = new HashSet<String>();
for (String value : collection) {
set.add(value.toLowerCase());
set.add(value.toLowerCase(Locale.ENGLISH));
}
return set;
}
@ -300,7 +301,7 @@ public class FileSystemAccessService extends BaseService implements FileSystemAc
protected void validateNamenode(String namenode) throws FileSystemAccessException {
if (nameNodeWhitelist.size() > 0 && !nameNodeWhitelist.contains("*")) {
if (!nameNodeWhitelist.contains(namenode.toLowerCase())) {
if (!nameNodeWhitelist.contains(namenode.toLowerCase(Locale.ENGLISH))) {
throw new FileSystemAccessException(FileSystemAccessException.ERROR.H05, namenode, "not in whitelist");
}
}

View File

@ -22,6 +22,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.util.StringUtils;
import java.util.Arrays;
import java.util.Locale;
@InterfaceAudience.Private
public abstract class EnumParam<E extends Enum<E>> extends Param<E> {
@ -34,7 +35,7 @@ public abstract class EnumParam<E extends Enum<E>> extends Param<E> {
@Override
protected E parse(String str) throws Exception {
return Enum.valueOf(klass, str.toUpperCase());
return Enum.valueOf(klass, str.toUpperCase(Locale.ENGLISH));
}
@Override

View File

@ -20,6 +20,7 @@ package org.apache.hadoop.lib.wsrs;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.Iterator;
import java.util.Locale;
import org.apache.hadoop.classification.InterfaceAudience;
@ -37,7 +38,7 @@ public abstract class EnumSetParam<E extends Enum<E>> extends Param<EnumSet<E>>
final EnumSet<E> set = EnumSet.noneOf(klass);
if (!str.isEmpty()) {
for (String sub : str.split(",")) {
set.add(Enum.valueOf(klass, sub.trim().toUpperCase()));
set.add(Enum.valueOf(klass, sub.trim().toUpperCase(Locale.ENGLISH)));
}
}
return set;

View File

@ -33,6 +33,7 @@ import java.lang.reflect.Type;
import java.text.MessageFormat;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
/**
@ -70,7 +71,7 @@ public class ParametersProvider
}
Enum op;
try {
op = Enum.valueOf(enumClass, str.toUpperCase());
op = Enum.valueOf(enumClass, str.toUpperCase(Locale.ENGLISH));
} catch (IllegalArgumentException ex) {
throw new IllegalArgumentException(
MessageFormat.format("Invalid Operation [{0}]", str));

View File

@ -21,6 +21,7 @@ package org.apache.hadoop.hdfs;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
@ -78,7 +79,7 @@ public enum StorageType {
}
public static StorageType parseStorageType(String s) {
return StorageType.valueOf(s.toUpperCase());
return StorageType.valueOf(s.toUpperCase(Locale.ENGLISH));
}
private static List<StorageType> getNonTransientTypes() {

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.hdfs;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import org.apache.hadoop.HadoopIllegalArgumentException;
@ -57,16 +58,22 @@ public class XAttrHelper {
}
NameSpace ns;
final String prefix = name.substring(0, prefixIndex).toLowerCase();
if (prefix.equals(NameSpace.USER.toString().toLowerCase())) {
final String prefix = name.substring(0, prefixIndex)
.toLowerCase(Locale.ENGLISH);
if (prefix.equals(
NameSpace.USER.toString().toLowerCase(Locale.ENGLISH))) {
ns = NameSpace.USER;
} else if (prefix.equals(NameSpace.TRUSTED.toString().toLowerCase())) {
} else if (prefix.equals(
NameSpace.TRUSTED.toString().toLowerCase(Locale.ENGLISH))) {
ns = NameSpace.TRUSTED;
} else if (prefix.equals(NameSpace.SYSTEM.toString().toLowerCase())) {
} else if (prefix.equals(
NameSpace.SYSTEM.toString().toLowerCase(Locale.ENGLISH))) {
ns = NameSpace.SYSTEM;
} else if (prefix.equals(NameSpace.SECURITY.toString().toLowerCase())) {
} else if (prefix.equals(
NameSpace.SECURITY.toString().toLowerCase(Locale.ENGLISH))) {
ns = NameSpace.SECURITY;
} else if (prefix.equals(NameSpace.RAW.toString().toLowerCase())) {
} else if (prefix.equals(
NameSpace.RAW.toString().toLowerCase(Locale.ENGLISH))) {
ns = NameSpace.RAW;
} else {
throw new HadoopIllegalArgumentException("An XAttr name must be " +
@ -145,7 +152,7 @@ public class XAttrHelper {
}
String namespace = xAttr.getNameSpace().toString();
return namespace.toLowerCase() + "." + xAttr.getName();
return namespace.toLowerCase(Locale.ENGLISH) + "." + xAttr.getName();
}
/**

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.hdfs.protocol;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import org.apache.hadoop.classification.InterfaceAudience;
@ -98,7 +99,7 @@ public class HdfsConstants {
/** Covert the given String to a RollingUpgradeAction. */
public static RollingUpgradeAction fromString(String s) {
return MAP.get(s.toUpperCase());
return MAP.get(s.toUpperCase(Locale.ENGLISH));
}
}

View File

@ -30,6 +30,7 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
import java.util.Locale;
/** A collection of block storage policies. */
public class BlockStoragePolicySuite {
@ -131,7 +132,8 @@ public class BlockStoragePolicySuite {
}
public static String buildXAttrName() {
return XAttrNS.toString().toLowerCase() + "." + STORAGE_POLICY_XATTR_NAME;
return XAttrNS.toString().toLowerCase(Locale.ENGLISH) + "."
+ STORAGE_POLICY_XATTR_NAME;
}
public static XAttr buildXAttr(byte policyId) {

View File

@ -20,6 +20,7 @@ package org.apache.hadoop.hdfs.server.common;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Locale;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@ -53,7 +54,7 @@ public final class HdfsServerConstants {
public String getOptionString() {
return StartupOption.ROLLINGUPGRADE.getName() + " "
+ name().toLowerCase();
+ name().toLowerCase(Locale.ENGLISH);
}
public boolean matches(StartupOption option) {
@ -76,7 +77,7 @@ public final class HdfsServerConstants {
public static String getAllOptionString() {
final StringBuilder b = new StringBuilder("<");
for(RollingUpgradeStartupOption opt : VALUES) {
b.append(opt.name().toLowerCase()).append("|");
b.append(opt.name().toLowerCase(Locale.ENGLISH)).append("|");
}
b.setCharAt(b.length() - 1, '>');
return b.toString();

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.hdfs.server.datanode;
import java.util.Locale;
import java.util.regex.Pattern;
import java.io.File;
@ -88,7 +89,8 @@ public class StorageLocation {
String classString = matcher.group(1);
location = matcher.group(2);
if (!classString.isEmpty()) {
storageType = StorageType.valueOf(classString.toUpperCase());
storageType = StorageType.valueOf(
classString.toUpperCase(Locale.ENGLISH));
}
}

View File

@ -75,6 +75,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.EnumMap;
import java.util.Locale;
import java.util.List;
import java.util.zip.CheckedInputStream;
import java.util.zip.Checksum;
@ -4348,7 +4349,7 @@ public abstract class FSEditLogOp {
public RollingUpgradeOp(FSEditLogOpCodes code, String name) {
super(code);
this.name = name.toUpperCase();
this.name = name.toUpperCase(Locale.ENGLISH);
}
static RollingUpgradeOp getStartInstance(OpInstanceCache cache) {

View File

@ -19,6 +19,7 @@ package org.apache.hadoop.hdfs.server.namenode;
import com.google.common.base.Objects;
import org.apache.hadoop.hdfs.StorageType;
import java.util.Locale;
public class QuotaByStorageTypeEntry {
private StorageType type;
@ -53,7 +54,7 @@ import org.apache.hadoop.hdfs.StorageType;
public String toString() {
StringBuilder sb = new StringBuilder();
assert (type != null);
sb.append(type.toString().toLowerCase());
sb.append(type.toString().toLowerCase(Locale.ENGLISH));
sb.append(':');
sb.append(quota);
return sb.toString();

View File

@ -587,7 +587,7 @@ public class SecondaryNameNode implements Runnable,
return 0;
}
String cmd = opts.getCommand().toString().toLowerCase();
String cmd = opts.getCommand().toString().toLowerCase(Locale.ENGLISH);
int exitCode = 0;
try {

View File

@ -24,6 +24,7 @@ import java.security.PrivilegedExceptionAction;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import org.apache.hadoop.HadoopIllegalArgumentException;
@ -79,19 +80,19 @@ public class GetConf extends Configured implements Tool {
private static final Map<String, CommandHandler> map;
static {
map = new HashMap<String, CommandHandler>();
map.put(NAMENODE.getName().toLowerCase(),
map.put(NAMENODE.getName().toLowerCase(Locale.ENGLISH),
new NameNodesCommandHandler());
map.put(SECONDARY.getName().toLowerCase(),
map.put(SECONDARY.getName().toLowerCase(Locale.ENGLISH),
new SecondaryNameNodesCommandHandler());
map.put(BACKUP.getName().toLowerCase(),
map.put(BACKUP.getName().toLowerCase(Locale.ENGLISH),
new BackupNodesCommandHandler());
map.put(INCLUDE_FILE.getName().toLowerCase(),
map.put(INCLUDE_FILE.getName().toLowerCase(Locale.ENGLISH),
new CommandHandler(DFSConfigKeys.DFS_HOSTS));
map.put(EXCLUDE_FILE.getName().toLowerCase(),
map.put(EXCLUDE_FILE.getName().toLowerCase(Locale.ENGLISH),
new CommandHandler(DFSConfigKeys.DFS_HOSTS_EXCLUDE));
map.put(NNRPCADDRESSES.getName().toLowerCase(),
map.put(NNRPCADDRESSES.getName().toLowerCase(Locale.ENGLISH),
new NNRpcAddressesCommandHandler());
map.put(CONFKEY.getName().toLowerCase(),
map.put(CONFKEY.getName().toLowerCase(Locale.ENGLISH),
new PrintConfKeyCommandHandler());
}
@ -116,7 +117,7 @@ public class GetConf extends Configured implements Tool {
}
public static CommandHandler getHandler(String cmd) {
return map.get(cmd.toLowerCase());
return map.get(cmd.toLowerCase(Locale.ENGLISH));
}
}

View File

@ -20,6 +20,7 @@ package org.apache.hadoop.hdfs.tools.offlineEditsViewer;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Locale;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
@ -43,7 +44,7 @@ public class OfflineEditsVisitorFactory {
*/
static public OfflineEditsVisitor getEditsVisitor(String filename,
String processor, boolean printToScreen) throws IOException {
if(processor.toLowerCase().equals("binary")) {
if(processor.toLowerCase(Locale.ENGLISH).equals("binary")) {
return new BinaryEditsVisitor(filename);
}
OfflineEditsVisitor vis;
@ -59,9 +60,9 @@ public class OfflineEditsVisitorFactory {
outs[1] = System.out;
out = new TeeOutputStream(outs);
}
if(processor.toLowerCase().equals("xml")) {
if(processor.toLowerCase(Locale.ENGLISH).equals("xml")) {
vis = new XmlEditsVisitor(out);
} else if(processor.toLowerCase().equals("stats")) {
} else if(processor.toLowerCase(Locale.ENGLISH).equals("stats")) {
vis = new StatisticsEditsVisitor(out);
} else {
throw new IOException("Unknown proccesor " + processor +

View File

@ -37,6 +37,7 @@ import org.apache.hadoop.hdfs.web.JsonUtil;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import static io.netty.handler.codec.http.HttpHeaders.Names.CONNECTION;
@ -141,7 +142,7 @@ class FSImageHandler extends SimpleChannelInboundHandler<HttpRequest> {
private static String getOp(QueryStringDecoder decoder) {
Map<String, List<String>> parameters = decoder.parameters();
return parameters.containsKey("op")
? parameters.get("op").get(0).toUpperCase() : null;
? parameters.get("op").get(0).toUpperCase(Locale.ENGLISH) : null;
}
private static String getPath(QueryStringDecoder decoder)

View File

@ -23,6 +23,7 @@ import java.util.Enumeration;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
@ -96,7 +97,7 @@ public class AuthFilter extends AuthenticationFilter {
final Map<String, List<String>> m = new HashMap<String, List<String>>();
for(Map.Entry<String, String[]> entry : original.entrySet()) {
final String key = entry.getKey().toLowerCase();
final String key = entry.getKey().toLowerCase(Locale.ENGLISH);
List<String> strings = m.get(key);
if (strings == null) {
strings = new ArrayList<String>();

View File

@ -19,6 +19,7 @@ package org.apache.hadoop.hdfs.web;
import java.net.URI;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import javax.ws.rs.core.MultivaluedMap;
@ -75,7 +76,7 @@ public class ParamFilter implements ResourceFilter {
final MultivaluedMap<String, String> parameters) {
UriBuilder b = UriBuilder.fromUri(uri).replaceQuery("");
for(Map.Entry<String, List<String>> e : parameters.entrySet()) {
final String key = e.getKey().toLowerCase();
final String key = e.getKey().toLowerCase(Locale.ENGLISH);
for(String v : e.getValue()) {
b = b.queryParam(key, v);
}

View File

@ -32,6 +32,7 @@ import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.StringTokenizer;
@ -1242,7 +1243,7 @@ public class WebHdfsFileSystem extends FileSystem
if (query == null) {
return url;
}
final String lower = query.toLowerCase();
final String lower = query.toLowerCase(Locale.ENGLISH);
if (!lower.startsWith(OFFSET_PARAM_PREFIX)
&& !lower.contains("&" + OFFSET_PARAM_PREFIX)) {
return url;
@ -1253,7 +1254,7 @@ public class WebHdfsFileSystem extends FileSystem
for(final StringTokenizer st = new StringTokenizer(query, "&");
st.hasMoreTokens();) {
final String token = st.nextToken();
if (!token.toLowerCase().startsWith(OFFSET_PARAM_PREFIX)) {
if (!token.toLowerCase(Locale.ENGLISH).startsWith(OFFSET_PARAM_PREFIX)) {
if (b == null) {
b = new StringBuilder("?").append(token);
} else {

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.hdfs.web.resources;
import java.util.Arrays;
import java.util.Locale;
abstract class EnumParam<E extends Enum<E>> extends Param<E, EnumParam.Domain<E>> {
EnumParam(final Domain<E> domain, final E value) {
@ -40,7 +41,7 @@ abstract class EnumParam<E extends Enum<E>> extends Param<E, EnumParam.Domain<E>
@Override
final E parse(final String str) {
return Enum.valueOf(enumClass, str.toUpperCase());
return Enum.valueOf(enumClass, str.toUpperCase(Locale.ENGLISH));
}
}
}

View File

@ -20,6 +20,7 @@ package org.apache.hadoop.hdfs.web.resources;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.Iterator;
import java.util.Locale;
abstract class EnumSetParam<E extends Enum<E>> extends Param<EnumSet<E>, EnumSetParam.Domain<E>> {
/** Convert an EnumSet to a string of comma separated values. */
@ -82,7 +83,8 @@ abstract class EnumSetParam<E extends Enum<E>> extends Param<EnumSet<E>, EnumSet
i = j > 0 ? j + 1 : 0;
j = str.indexOf(',', i);
final String sub = j >= 0? str.substring(i, j): str.substring(i);
set.add(Enum.valueOf(enumClass, sub.trim().toUpperCase()));
set.add(
Enum.valueOf(enumClass, sub.trim().toUpperCase(Locale.ENGLISH)));
}
}
return set;

View File

@ -25,6 +25,7 @@ import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import java.util.ArrayList;
import java.util.Locale;
import org.apache.hadoop.hdfs.protocol.SnapshotException;
import org.apache.hadoop.hdfs.server.namenode.FSDirectory;
@ -70,7 +71,7 @@ public class TestSnapshotManager {
Assert.fail("Expected SnapshotException not thrown");
} catch (SnapshotException se) {
Assert.assertTrue(
se.getMessage().toLowerCase().contains("rollover"));
se.getMessage().toLowerCase(Locale.ENGLISH).contains("rollover"));
}
// Delete a snapshot to free up a slot.
@ -86,7 +87,7 @@ public class TestSnapshotManager {
Assert.fail("Expected SnapshotException not thrown");
} catch (SnapshotException se) {
Assert.assertTrue(
se.getMessage().toLowerCase().contains("rollover"));
se.getMessage().toLowerCase(Locale.ENGLISH).contains("rollover"));
}
}
}

View File

@ -23,6 +23,7 @@ import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Locale;
import java.util.Map;
import java.util.Timer;
import java.util.TimerTask;
@ -711,7 +712,8 @@ public class JobHistoryEventHandler extends AbstractService
private void processEventForTimelineServer(HistoryEvent event, JobId jobId,
long timestamp) {
TimelineEvent tEvent = new TimelineEvent();
tEvent.setEventType(event.getEventType().name().toUpperCase());
tEvent.setEventType(
event.getEventType().name().toUpperCase(Locale.ENGLISH));
tEvent.setTimestamp(timestamp);
TimelineEntity tEntity = new TimelineEntity();

View File

@ -227,7 +227,7 @@ public class AppController extends Controller implements AMParams {
try {
String tt = $(TASK_TYPE);
tt = tt.isEmpty() ? "All" : StringUtils.capitalize(MRApps.taskType(tt).
toString().toLowerCase(Locale.US));
toString().toLowerCase(Locale.ENGLISH));
setTitle(join(tt, " Tasks for ", $(JOB_ID)));
} catch (Exception e) {
LOG.error("Failed to render tasks page with task type : "

View File

@ -21,6 +21,7 @@ package org.apache.hadoop.mapreduce;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.JobPriority;
@ -314,7 +315,7 @@ public class TypeConverter {
QueueState state) {
org.apache.hadoop.mapreduce.QueueState qState =
org.apache.hadoop.mapreduce.QueueState.getState(
state.toString().toLowerCase());
state.toString().toLowerCase(Locale.ENGLISH));
return qState;
}

View File

@ -30,6 +30,7 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import com.google.common.annotations.VisibleForTesting;
@ -303,7 +304,7 @@ public class MRApps extends Apps {
remoteFS.getWorkingDirectory()));
String name = (null == u.getFragment())
? p.getName() : u.getFragment();
if (!name.toLowerCase().endsWith(".jar")) {
if (!name.toLowerCase(Locale.ENGLISH).endsWith(".jar")) {
linkLookup.put(p, name);
}
}
@ -317,7 +318,7 @@ public class MRApps extends Apps {
if (name == null) {
name = p.getName();
}
if(!name.toLowerCase().endsWith(".jar")) {
if(!name.toLowerCase(Locale.ENGLISH).endsWith(".jar")) {
MRApps.addToEnvironment(
environment,
classpathEnvVar,

View File

@ -22,6 +22,7 @@ import static org.mockito.Mockito.when;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.JobStatus.State;
@ -153,7 +154,8 @@ public class TestTypeConverter {
org.apache.hadoop.mapreduce.QueueInfo returned =
TypeConverter.fromYarn(queueInfo, new Configuration());
Assert.assertEquals("queueInfo translation didn't work.",
returned.getState().toString(), queueInfo.getQueueState().toString().toLowerCase());
returned.getState().toString(),
queueInfo.getQueueState().toString().toLowerCase(Locale.ENGLISH));
}
/**

View File

@ -28,6 +28,7 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.concurrent.atomic.AtomicBoolean;
@ -116,7 +117,7 @@ abstract public class Task implements Writable, Configurable {
* BYTES_READ counter and second one is of the BYTES_WRITTEN counter.
*/
protected static String[] getFileSystemCounterNames(String uriScheme) {
String scheme = uriScheme.toUpperCase();
String scheme = uriScheme.toUpperCase(Locale.ENGLISH);
return new String[]{scheme+"_BYTES_READ", scheme+"_BYTES_WRITTEN"};
}

View File

@ -227,7 +227,7 @@ public abstract class FileSystemCounterGroup<C extends Counter>
}
private String checkScheme(String scheme) {
String fixed = scheme.toUpperCase(Locale.US);
String fixed = scheme.toUpperCase(Locale.ENGLISH);
String interned = schemes.putIfAbsent(fixed, fixed);
if (schemes.size() > MAX_NUM_SCHEMES) {
// mistakes or abuses

View File

@ -473,7 +473,7 @@ public class DistributedCache {
if (fragment == null) {
return false;
}
String lowerCaseFragment = fragment.toLowerCase();
String lowerCaseFragment = fragment.toLowerCase(Locale.ENGLISH);
if (fragments.contains(lowerCaseFragment)) {
return false;
}
@ -488,7 +488,7 @@ public class DistributedCache {
if (fragment == null) {
return false;
}
String lowerCaseFragment = fragment.toLowerCase();
String lowerCaseFragment = fragment.toLowerCase(Locale.ENGLISH);
if (fragments.contains(lowerCaseFragment)) {
return false;
}

View File

@ -29,6 +29,7 @@ import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@ -162,7 +163,8 @@ public class DBInputFormat<T extends DBWritable>
this.connection = createConnection();
DatabaseMetaData dbMeta = connection.getMetaData();
this.dbProductName = dbMeta.getDatabaseProductName().toUpperCase();
this.dbProductName =
dbMeta.getDatabaseProductName().toUpperCase(Locale.ENGLISH);
}
catch (Exception ex) {
throw new RuntimeException(ex);

View File

@ -22,6 +22,7 @@ import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.HashSet;
import java.util.Arrays;
@ -222,12 +223,12 @@ public class CLI extends Configured implements Tool {
taskType = argv[2];
taskState = argv[3];
displayTasks = true;
if (!taskTypes.contains(taskType.toUpperCase())) {
if (!taskTypes.contains(taskType.toUpperCase(Locale.ENGLISH))) {
System.out.println("Error: Invalid task-type: " + taskType);
displayUsage(cmd);
return exitCode;
}
if (!taskStates.contains(taskState.toLowerCase())) {
if (!taskStates.contains(taskState.toLowerCase(Locale.ENGLISH))) {
System.out.println("Error: Invalid task-state: " + taskState);
displayUsage(cmd);
return exitCode;
@ -593,7 +594,8 @@ public class CLI extends Configured implements Tool {
throws IOException, InterruptedException {
TaskReport[] reports=null;
reports = job.getTaskReports(TaskType.valueOf(type.toUpperCase()));
reports = job.getTaskReports(
TaskType.valueOf(type.toUpperCase(Locale.ENGLISH)));
for (TaskReport report : reports) {
TIPStatus status = report.getCurrentStatus();
if ((state.equalsIgnoreCase("pending") && status ==TIPStatus.PENDING) ||

View File

@ -29,6 +29,7 @@ import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.PrintStream;
import java.util.Date;
import java.util.Locale;
import java.util.Random;
import java.util.StringTokenizer;
import org.apache.commons.logging.Log;
@ -154,16 +155,16 @@ public class TestDFSIO implements Tool {
static ByteMultiple parseString(String sMultiple) {
if(sMultiple == null || sMultiple.isEmpty()) // MB by default
return MB;
String sMU = sMultiple.toUpperCase();
if(B.name().toUpperCase().endsWith(sMU))
String sMU = sMultiple.toUpperCase(Locale.ENGLISH);
if(B.name().toUpperCase(Locale.ENGLISH).endsWith(sMU))
return B;
if(KB.name().toUpperCase().endsWith(sMU))
if(KB.name().toUpperCase(Locale.ENGLISH).endsWith(sMU))
return KB;
if(MB.name().toUpperCase().endsWith(sMU))
if(MB.name().toUpperCase(Locale.ENGLISH).endsWith(sMU))
return MB;
if(GB.name().toUpperCase().endsWith(sMU))
if(GB.name().toUpperCase(Locale.ENGLISH).endsWith(sMU))
return GB;
if(TB.name().toUpperCase().endsWith(sMU))
if(TB.name().toUpperCase(Locale.ENGLISH).endsWith(sMU))
return TB;
throw new IllegalArgumentException("Unsupported ByteMultiple "+sMultiple);
}
@ -736,7 +737,7 @@ public class TestDFSIO implements Tool {
}
for (int i = 0; i < args.length; i++) { // parse command line
if (args[i].toLowerCase().startsWith("-read")) {
if (args[i].toLowerCase(Locale.ENGLISH).startsWith("-read")) {
testType = TestType.TEST_TYPE_READ;
} else if (args[i].equalsIgnoreCase("-write")) {
testType = TestType.TEST_TYPE_WRITE;
@ -755,9 +756,10 @@ public class TestDFSIO implements Tool {
testType = TestType.TEST_TYPE_TRUNCATE;
} else if (args[i].equalsIgnoreCase("-clean")) {
testType = TestType.TEST_TYPE_CLEANUP;
} else if (args[i].toLowerCase().startsWith("-seq")) {
} else if (args[i].toLowerCase(Locale.ENGLISH).startsWith("-seq")) {
isSequential = true;
} else if (args[i].toLowerCase().startsWith("-compression")) {
} else if (
args[i].toLowerCase(Locale.ENGLISH).startsWith("-compression")) {
compressionClass = args[++i];
} else if (args[i].equalsIgnoreCase("-nrfiles")) {
nrFiles = Integer.parseInt(args[++i]);

View File

@ -24,6 +24,7 @@ import java.io.OutputStream;
import java.security.PrivilegedExceptionAction;
import java.util.Arrays;
import java.util.Collections;
import java.util.Locale;
import java.util.Random;
import java.util.List;
import java.util.ArrayList;
@ -556,7 +557,10 @@ public class TestFileSystem extends TestCase {
static void checkPath(MiniDFSCluster cluster, FileSystem fileSys) throws IOException {
InetSocketAddress add = cluster.getNameNode().getNameNodeAddress();
// Test upper/lower case
fileSys.checkPath(new Path("hdfs://" + add.getHostName().toUpperCase() + ":" + add.getPort()));
fileSys.checkPath(
new Path("hdfs://"
+ add.getHostName().toUpperCase(Locale.ENGLISH)
+ ":" + add.getPort()));
}
public void testFsClose() throws Exception {

View File

@ -18,6 +18,8 @@
package org.apache.hadoop.fs.slive;
import java.util.Locale;
/**
* Constants used in various places in slive
*/
@ -35,7 +37,7 @@ class Constants {
enum Distribution {
BEG, END, UNIFORM, MID;
String lowerName() {
return this.name().toLowerCase();
return this.name().toLowerCase(Locale.ENGLISH);
}
}
@ -45,7 +47,7 @@ class Constants {
enum OperationType {
READ, APPEND, RENAME, LS, MKDIR, DELETE, CREATE;
String lowerName() {
return this.name().toLowerCase();
return this.name().toLowerCase(Locale.ENGLISH);
}
}

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.fs.slive;
import java.util.Locale;
import org.apache.hadoop.fs.slive.Constants.Distribution;
/**
@ -52,7 +53,8 @@ class OperationData {
percent = (Double.parseDouble(pieces[0]) / 100.0d);
} else if (pieces.length >= 2) {
percent = (Double.parseDouble(pieces[0]) / 100.0d);
distribution = Distribution.valueOf(pieces[1].toUpperCase());
distribution =
Distribution.valueOf(pieces[1].toUpperCase(Locale.ENGLISH));
}
}

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.fs.slive;
import java.util.Locale;
import org.apache.hadoop.io.Text;
/**
@ -67,7 +68,8 @@ class OperationOutput {
"Invalid key format - no type seperator - " + TYPE_SEP);
}
try {
dataType = OutputType.valueOf(key.substring(0, place).toUpperCase());
dataType = OutputType.valueOf(
key.substring(0, place).toUpperCase(Locale.ENGLISH));
} catch (Exception e) {
throw new IllegalArgumentException(
"Invalid key format - invalid output type", e);

View File

@ -27,6 +27,7 @@ import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.TreeMap;
@ -157,7 +158,7 @@ public class SliveTest implements Tool {
if (val == null) {
return false;
}
String cleanupOpt = val.toLowerCase().trim();
String cleanupOpt = val.toLowerCase(Locale.ENGLISH).trim();
if (cleanupOpt.equals("true") || cleanupOpt.equals("1")) {
return true;
} else {

View File

@ -23,6 +23,7 @@ import java.util.ArrayList;
import java.util.Date;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Random;
@ -214,23 +215,25 @@ public class FileBench extends Configured implements Tool {
if (!(fmt == Format.txt || cod == CCodec.pln)) {
for (CType typ : ct) {
String fn =
fmt.name().toUpperCase() + "_" +
cod.name().toUpperCase() + "_" +
typ.name().toUpperCase();
fmt.name().toUpperCase(Locale.ENGLISH) + "_" +
cod.name().toUpperCase(Locale.ENGLISH) + "_" +
typ.name().toUpperCase(Locale.ENGLISH);
typ.configure(job);
System.out.print(rwop.name().toUpperCase() + " " + fn + ": ");
System.out.print(
rwop.name().toUpperCase(Locale.ENGLISH) + " " + fn + ": ");
System.out.println(rwop.exec(fn, job) / 1000 +
" seconds");
}
} else {
String fn =
fmt.name().toUpperCase() + "_" +
cod.name().toUpperCase();
fmt.name().toUpperCase(Locale.ENGLISH) + "_" +
cod.name().toUpperCase(Locale.ENGLISH);
Path p = new Path(root, fn);
if (rwop == RW.r && !fs.exists(p)) {
fn += cod.getExt();
}
System.out.print(rwop.name().toUpperCase() + " " + fn + ": ");
System.out.print(
rwop.name().toUpperCase(Locale.ENGLISH) + " " + fn + ": ");
System.out.println(rwop.exec(fn, job) / 1000 +
" seconds");
}

View File

@ -28,6 +28,7 @@ import java.io.File;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Locale;
import java.util.Random;
import org.apache.hadoop.conf.Configuration;
@ -280,7 +281,7 @@ public class TestMapRed extends Configured implements Tool {
public void map(WritableComparable key, Text value,
OutputCollector<Text, Text> output,
Reporter reporter) throws IOException {
String str = value.toString().toLowerCase();
String str = value.toString().toLowerCase(Locale.ENGLISH);
output.collect(new Text(str), value);
}

View File

@ -27,6 +27,7 @@ import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Locale;
import java.util.Random;
import org.apache.commons.logging.Log;
@ -102,7 +103,7 @@ public class DBCountPageView extends Configured implements Tool {
private void createConnection(String driverClassName
, String url) throws Exception {
if(driverClassName.toLowerCase().contains("oracle")) {
if(driverClassName.toLowerCase(Locale.ENGLISH).contains("oracle")) {
isOracle = true;
}
Class.forName(driverClassName);

View File

@ -35,6 +35,7 @@ import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import java.util.TimeZone;
/**
@ -329,7 +330,8 @@ public class VersionInfoMojo extends AbstractMojo {
}
private String normalizePath(File file) {
return file.getPath().toUpperCase().replaceAll("\\\\", "/");
return file.getPath().toUpperCase(Locale.ENGLISH)
.replaceAll("\\\\", "/");
}
});
byte[] md5 = computeMD5(files);

View File

@ -979,8 +979,8 @@ public class AzureNativeFileSystemStore implements NativeFileSystemStore {
private String verifyAndConvertToStandardFormat(String rawDir) throws URISyntaxException {
URI asUri = new URI(rawDir);
if (asUri.getAuthority() == null
|| asUri.getAuthority().toLowerCase(Locale.US).equalsIgnoreCase(
sessionUri.getAuthority().toLowerCase(Locale.US))) {
|| asUri.getAuthority().toLowerCase(Locale.ENGLISH).equalsIgnoreCase(
sessionUri.getAuthority().toLowerCase(Locale.ENGLISH))) {
// Applies to me.
return trim(asUri.getPath(), "/");
} else {

View File

@ -121,8 +121,9 @@ public class DistCpUtils {
*/
public static Class<? extends InputFormat> getStrategy(Configuration conf,
DistCpOptions options) {
String confLabel = "distcp." +
options.getCopyStrategy().toLowerCase(Locale.getDefault()) + ".strategy.impl";
String confLabel = "distcp."
+ options.getCopyStrategy().toLowerCase(Locale.ENGLISH)
+ ".strategy.impl";
return conf.getClass(confLabel, UniformSizeInputFormat.class, InputFormat.class);
}
@ -221,7 +222,8 @@ public class DistCpUtils {
final boolean preserveXAttrs = attributes.contains(FileAttribute.XATTR);
if (preserveXAttrs || preserveRawXattrs) {
final String rawNS = XAttr.NameSpace.RAW.name().toLowerCase();
final String rawNS =
XAttr.NameSpace.RAW.name().toLowerCase(Locale.ENGLISH);
Map<String, byte[]> srcXAttrs = srcFileStatus.getXAttrs();
Map<String, byte[]> targetXAttrs = getXAttrs(targetFS, path);
if (srcXAttrs != null && !srcXAttrs.equals(targetXAttrs)) {
@ -321,7 +323,8 @@ public class DistCpUtils {
copyListingFileStatus.setXAttrs(srcXAttrs);
} else {
Map<String, byte[]> trgXAttrs = Maps.newHashMap();
final String rawNS = XAttr.NameSpace.RAW.name().toLowerCase();
final String rawNS =
XAttr.NameSpace.RAW.name().toLowerCase(Locale.ENGLISH);
for (Map.Entry<String, byte[]> ent : srcXAttrs.entrySet()) {
final String xattrName = ent.getKey();
if (xattrName.startsWith(rawNS)) {

View File

@ -31,6 +31,7 @@ import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Random;
import java.util.Stack;
import java.util.StringTokenizer;
@ -169,7 +170,9 @@ public class DistCpV1 implements Tool {
final char symbol;
private FileAttribute() {symbol = toString().toLowerCase().charAt(0);}
private FileAttribute() {
symbol = toString().toLowerCase(Locale.ENGLISH).charAt(0);
}
static EnumSet<FileAttribute> parse(String s) {
if (s == null || s.length() == 0) {

View File

@ -24,6 +24,7 @@ import org.apache.hadoop.mapred.gridmix.Statistics.JobStats;
import org.apache.hadoop.mapred.gridmix.Statistics.ClusterStats;
import java.util.concurrent.CountDownLatch;
import java.util.Locale;
import java.io.IOException;
enum GridmixJobSubmissionPolicy {
@@ -84,6 +85,6 @@ enum GridmixJobSubmissionPolicy {
public static GridmixJobSubmissionPolicy getPolicy(
Configuration conf, GridmixJobSubmissionPolicy defaultPolicy) {
String policy = conf.get(JOB_SUBMISSION_POLICY, defaultPolicy.name());
return valueOf(policy.toUpperCase());
return valueOf(policy.toUpperCase(Locale.ENGLISH));
}
}
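Note on the getPolicy() change above (and the similar valueOf calls later in this patch): with a locale-sensitive toUpperCase(), a JVM running under a Turkish default locale maps "i" to the dotted capital "İ" (U+0130), so Enum.valueOf can no longer match an ASCII constant name. Below is a minimal standalone sketch of that pitfall; the Policy enum and class name are hypothetical and not part of this patch.

import java.util.Locale;

public class LocaleCasePitfall {

  // Hypothetical enum standing in for the policies/states parsed in this patch.
  enum Policy { FIFO, REPLAY, STRESS }

  public static void main(String[] args) {
    // Simulate a JVM started on a Turkish-locale machine.
    Locale.setDefault(new Locale("tr", "TR"));

    String configValue = "fifo";

    // Default-locale upper-casing turns 'i' into 'İ' (U+0130): "FİFO".
    String defaultUpper = configValue.toUpperCase();
    // Locale-fixed upper-casing stays ASCII: "FIFO".
    String englishUpper = configValue.toUpperCase(Locale.ENGLISH);

    System.out.println(Policy.valueOf(englishUpper)); // prints FIFO

    try {
      Policy.valueOf(defaultUpper); // throws IllegalArgumentException
    } catch (IllegalArgumentException e) {
      System.out.println("default-locale parse failed for: " + defaultUpper);
    }
  }
}

The same reasoning applies to the lower-casing of application tags, node and application states, and config keys in the hunks below; Locale.ENGLISH pins the case mapping regardless of the host locale.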

View File

@@ -28,6 +28,7 @@ import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Locale;
import java.util.Map;
import java.util.StringTokenizer;
import java.util.ArrayList;
@@ -319,42 +320,43 @@ public class HadoopLogsAnalyzer extends Configured implements Tool {
}
for (int i = 0; i < args.length - (inputFilename == null ? 0 : 1); ++i) {
if ("-h".equals(args[i].toLowerCase())
|| "-help".equals(args[i].toLowerCase())) {
if ("-h".equals(args[i].toLowerCase(Locale.ENGLISH))
|| "-help".equals(args[i].toLowerCase(Locale.ENGLISH))) {
usage();
return 0;
}
if ("-c".equals(args[i].toLowerCase())
|| "-collect-prefixes".equals(args[i].toLowerCase())) {
if ("-c".equals(args[i].toLowerCase(Locale.ENGLISH))
|| "-collect-prefixes".equals(args[i].toLowerCase(Locale.ENGLISH))) {
collecting = true;
continue;
}
// these control the job digest
if ("-write-job-trace".equals(args[i].toLowerCase())) {
if ("-write-job-trace".equals(args[i].toLowerCase(Locale.ENGLISH))) {
++i;
jobTraceFilename = new Path(args[i]);
continue;
}
if ("-single-line-job-traces".equals(args[i].toLowerCase())) {
if ("-single-line-job-traces".equals(
args[i].toLowerCase(Locale.ENGLISH))) {
prettyprintTrace = false;
continue;
}
if ("-omit-task-details".equals(args[i].toLowerCase())) {
if ("-omit-task-details".equals(args[i].toLowerCase(Locale.ENGLISH))) {
omitTaskDetails = true;
continue;
}
if ("-write-topology".equals(args[i].toLowerCase())) {
if ("-write-topology".equals(args[i].toLowerCase(Locale.ENGLISH))) {
++i;
topologyFilename = new Path(args[i]);
continue;
}
if ("-job-digest-spectra".equals(args[i].toLowerCase())) {
if ("-job-digest-spectra".equals(args[i].toLowerCase(Locale.ENGLISH))) {
ArrayList<Integer> values = new ArrayList<Integer>();
++i;
@@ -384,13 +386,13 @@ public class HadoopLogsAnalyzer extends Configured implements Tool {
continue;
}
if ("-d".equals(args[i].toLowerCase())
|| "-debug".equals(args[i].toLowerCase())) {
if ("-d".equals(args[i].toLowerCase(Locale.ENGLISH))
|| "-debug".equals(args[i].toLowerCase(Locale.ENGLISH))) {
debug = true;
continue;
}
if ("-spreads".equals(args[i].toLowerCase())) {
if ("-spreads".equals(args[i].toLowerCase(Locale.ENGLISH))) {
int min = Integer.parseInt(args[i + 1]);
int max = Integer.parseInt(args[i + 2]);
@@ -404,22 +406,22 @@ public class HadoopLogsAnalyzer extends Configured implements Tool {
}
// These control log-wide CDF outputs
if ("-delays".equals(args[i].toLowerCase())) {
if ("-delays".equals(args[i].toLowerCase(Locale.ENGLISH))) {
delays = true;
continue;
}
if ("-runtimes".equals(args[i].toLowerCase())) {
if ("-runtimes".equals(args[i].toLowerCase(Locale.ENGLISH))) {
runtimes = true;
continue;
}
if ("-tasktimes".equals(args[i].toLowerCase())) {
if ("-tasktimes".equals(args[i].toLowerCase(Locale.ENGLISH))) {
collectTaskTimes = true;
continue;
}
if ("-v1".equals(args[i].toLowerCase())) {
if ("-v1".equals(args[i].toLowerCase(Locale.ENGLISH))) {
version = 1;
continue;
}

View File

@@ -20,6 +20,7 @@ package org.apache.hadoop.tools.rumen;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.StringTokenizer;
@@ -433,7 +434,7 @@ public class JobBuilder {
return Values.SUCCESS;
}
return Values.valueOf(name.toUpperCase());
return Values.valueOf(name.toUpperCase(Locale.ENGLISH));
}
private void processTaskUpdatedEvent(TaskUpdatedEvent event) {

View File

@@ -20,6 +20,7 @@ package org.apache.hadoop.tools.rumen;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.TreeSet;
@@ -243,7 +244,7 @@ public class LoggedTask implements DeepCompare {
}
private static String canonicalizeCounterName(String nonCanonicalName) {
String result = nonCanonicalName.toLowerCase();
String result = nonCanonicalName.toLowerCase(Locale.ENGLISH);
result = result.replace(' ', '|');
result = result.replace('-', '|');

View File

@@ -20,6 +20,7 @@ package org.apache.hadoop.tools.rumen;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.TreeSet;
@@ -611,7 +612,7 @@ public class LoggedTaskAttempt implements DeepCompare {
}
private static String canonicalizeCounterName(String nonCanonicalName) {
String result = nonCanonicalName.toLowerCase();
String result = nonCanonicalName.toLowerCase(Locale.ENGLISH);
result = result.replace(' ', '|');
result = result.replace('-', '|');

View File

@@ -43,7 +43,7 @@ public class Environment extends Properties {
// http://lopica.sourceforge.net/os.html
String command = null;
String OS = System.getProperty("os.name");
String lowerOs = OS.toLowerCase();
String lowerOs = OS.toLowerCase(Locale.ENGLISH);
if (OS.indexOf("Windows") > -1) {
command = "cmd /C set";
} else if (lowerOs.indexOf("ix") > -1 || lowerOs.indexOf("linux") > -1

View File

@@ -26,6 +26,7 @@ import java.text.DecimalFormat;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import org.apache.commons.cli.CommandLine;
@@ -173,7 +174,7 @@ public class ApplicationCLI extends YarnCLI {
if (types != null) {
for (String type : types) {
if (!type.trim().isEmpty()) {
appTypes.add(type.toUpperCase().trim());
appTypes.add(type.toUpperCase(Locale.ENGLISH).trim());
}
}
}
@@ -192,7 +193,7 @@
}
try {
appStates.add(YarnApplicationState.valueOf(state
.toUpperCase().trim()));
.toUpperCase(Locale.ENGLISH).trim()));
} catch (IllegalArgumentException ex) {
sysout.println("The application state " + state
+ " is invalid.");

View File

@@ -27,6 +27,7 @@ import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import org.apache.commons.cli.CommandLine;
@@ -110,7 +111,8 @@ public class NodeCLI extends YarnCLI {
if (types != null) {
for (String type : types) {
if (!type.trim().isEmpty()) {
nodeStates.add(NodeState.valueOf(type.trim().toUpperCase()));
nodeStates.add(
NodeState.valueOf(type.trim().toUpperCase(Locale.ENGLISH)));
}
}
}

View File

@@ -21,6 +21,7 @@ package org.apache.hadoop.yarn.api.protocolrecords.impl.pb;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import org.apache.commons.lang.math.LongRange;
@@ -213,7 +214,7 @@ public class GetApplicationsRequestPBImpl extends GetApplicationsRequest {
// Convert applicationTags to lower case and add
this.applicationTags = new HashSet<String>();
for (String tag : tags) {
this.applicationTags.add(tag.toLowerCase());
this.applicationTags.add(tag.toLowerCase(Locale.ENGLISH));
}
}
@@ -258,7 +259,8 @@ public class GetApplicationsRequestPBImpl extends GetApplicationsRequest {
public void setApplicationStates(Set<String> applicationStates) {
EnumSet<YarnApplicationState> appStates = null;
for (YarnApplicationState state : YarnApplicationState.values()) {
if (applicationStates.contains(state.name().toLowerCase())) {
if (applicationStates.contains(
state.name().toLowerCase(Locale.ENGLISH))) {
if (appStates == null) {
appStates = EnumSet.of(state);
} else {

View File

@@ -19,6 +19,7 @@
package org.apache.hadoop.yarn.api.records.impl.pb;
import java.util.HashSet;
import java.util.Locale;
import java.util.Set;
import org.apache.hadoop.classification.InterfaceAudience.Private;
@@ -291,7 +292,7 @@ extends ApplicationSubmissionContext {
// Convert applicationTags to lower case and add
this.applicationTags = new HashSet<String>();
for (String tag : tags) {
this.applicationTags.add(tag.toLowerCase());
this.applicationTags.add(tag.toLowerCase(Locale.ENGLISH));
}
}

View File

@@ -241,7 +241,8 @@ public class HamletGen {
puts(indent, "\n",
"private <T extends _> ", retName, "<T> ", methodName,
"_(T e, boolean inline) {\n",
" return new ", retName, "<T>(\"", retName.toLowerCase(Locale.US),
" return new ", retName, "<T>(\"",
retName.toLowerCase(Locale.ENGLISH),
"\", e, opt(", !endTagOptional.contains(retName), ", inline, ",
retName.equals("PRE"), ")); }");
}
@@ -258,7 +259,7 @@ public class HamletGen {
puts(0, ") {");
puts(indent,
topMode ? "" : " closeAttrs();\n",
" return ", retName.toLowerCase(Locale.US), "_(this, ",
" return ", retName.toLowerCase(Locale.ENGLISH), "_(this, ",
isInline(className, retName), ");\n", "}");
} else if (params.length == 1) {
puts(0, "String selector) {");

View File

@@ -19,6 +19,7 @@
package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
import java.util.Collections;
import java.util.Locale;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;
@@ -147,7 +148,8 @@ public class AHSWebServices extends WebServices {
}
Set<String> appStates = parseQueries(statesQuery, true);
for (String appState : appStates) {
switch (YarnApplicationState.valueOf(appState.toUpperCase())) {
switch (YarnApplicationState.valueOf(
appState.toUpperCase(Locale.ENGLISH))) {
case FINISHED:
case FAILED:
case KILLED:

View File

@@ -24,6 +24,7 @@ import java.util.Collection;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
@@ -417,7 +418,7 @@ public class TimelineWebServices {
String[] strs = str.split(delimiter);
List<Field> fieldList = new ArrayList<Field>();
for (String s : strs) {
s = s.trim().toUpperCase();
s = s.trim().toUpperCase(Locale.ENGLISH);
if (s.equals("EVENTS")) {
fieldList.add(Field.EVENTS);
} else if (s.equals("LASTEVENTONLY")) {

View File

@@ -23,6 +23,7 @@ import java.security.PrivilegedExceptionAction;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.Locale;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;
@@ -164,7 +165,7 @@ public class WebServices {
if (checkAppStates
&& !appStates.contains(appReport.getYarnApplicationState().toString()
.toLowerCase())) {
.toLowerCase(Locale.ENGLISH))) {
continue;
}
if (finalStatusQuery != null && !finalStatusQuery.isEmpty()) {
@@ -186,7 +187,7 @@
}
if (checkAppTypes
&& !appTypes.contains(appReport.getApplicationType().trim()
.toLowerCase())) {
.toLowerCase(Locale.ENGLISH))) {
continue;
}
@@ -368,7 +369,8 @@ public class WebServices {
if (isState) {
try {
// enum string is in uppercase
YarnApplicationState.valueOf(paramStr.trim().toUpperCase());
YarnApplicationState.valueOf(
paramStr.trim().toUpperCase(Locale.ENGLISH));
} catch (RuntimeException e) {
YarnApplicationState[] stateArray =
YarnApplicationState.values();
@@ -378,7 +380,7 @@
+ allAppStates);
}
}
params.add(paramStr.trim().toLowerCase());
params.add(paramStr.trim().toLowerCase(Locale.ENGLISH));
}
}
}

View File

@@ -29,6 +29,7 @@ import java.util.Collections;
import java.util.EnumSet;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
@@ -754,7 +755,7 @@ public class ClientRMService extends AbstractService implements
if (applicationTypes != null && !applicationTypes.isEmpty()) {
String appTypeToMatch = caseSensitive
? application.getApplicationType()
: application.getApplicationType().toLowerCase();
: application.getApplicationType().toLowerCase(Locale.ENGLISH);
if (!applicationTypes.contains(appTypeToMatch)) {
continue;
}

View File

@@ -18,6 +18,7 @@
package org.apache.hadoop.yarn.server.resourcemanager.resource;
import java.util.Locale;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Evolving;
@@ -61,7 +62,7 @@ public class ResourceWeights {
sb.append(", ");
}
ResourceType resourceType = ResourceType.values()[i];
sb.append(resourceType.name().toLowerCase());
sb.append(resourceType.name().toLowerCase(Locale.ENGLISH));
sb.append(String.format(" weight=%.1f", getWeight(resourceType)));
}
sb.append(">");

View File

@@ -24,6 +24,7 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.StringTokenizer;
@@ -394,7 +395,8 @@ public class CapacitySchedulerConfiguration extends ReservationSchedulerConfigur
public QueueState getState(String queue) {
String state = get(getQueuePrefix(queue) + STATE);
return (state != null) ?
QueueState.valueOf(state.toUpperCase()) : QueueState.RUNNING;
QueueState.valueOf(state.toUpperCase(Locale.ENGLISH)) :
QueueState.RUNNING;
}
public void setAccessibleNodeLabels(String queue, Set<String> labels) {
@@ -490,7 +492,7 @@
}
private static String getAclKey(QueueACL acl) {
return "acl_" + acl.toString().toLowerCase();
return "acl_" + acl.toString().toLowerCase(Locale.ENGLISH);
}
public AccessControlList getAcl(String queue, QueueACL acl) {

View File

@@ -20,6 +20,7 @@ package org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -241,7 +242,7 @@ public class FairSchedulerConfiguration extends Configuration {
public static Resource parseResourceConfigValue(String val)
throws AllocationConfigurationException {
try {
val = val.toLowerCase();
val = val.toLowerCase(Locale.ENGLISH);
int memory = findResource(val, "mb");
int vcores = findResource(val, "vcores");
return BuilderUtils.newResource(memory, vcores);

View File

@@ -17,6 +17,7 @@
*/
package org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair;
import java.util.Locale;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Evolving;
import org.apache.hadoop.util.ReflectionUtils;
@@ -72,7 +73,7 @@ public abstract class SchedulingPolicy {
throws AllocationConfigurationException {
@SuppressWarnings("rawtypes")
Class clazz;
String text = policy.toLowerCase();
String text = policy.toLowerCase(Locale.ENGLISH);
if (text.equalsIgnoreCase(FairSharePolicy.NAME)) {
clazz = FairSharePolicy.class;
} else if (text.equalsIgnoreCase(FifoPolicy.NAME)) {

View File

@@ -18,6 +18,7 @@
package org.apache.hadoop.yarn.server.resourcemanager.webapp;
import java.util.Locale;
import static org.apache.hadoop.yarn.webapp.YarnWebParams.NODE_STATE;
import static org.apache.hadoop.yarn.webapp.YarnWebParams.NODE_LABEL;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES;
@@ -77,7 +78,7 @@ class NodesPage extends RmView {
.th(".nodeManagerVersion", "Version")._()._().tbody();
NodeState stateFilter = null;
if (type != null && !type.isEmpty()) {
stateFilter = NodeState.valueOf(type.toUpperCase());
stateFilter = NodeState.valueOf(type.toUpperCase(Locale.ENGLISH));
}
Collection<RMNode> rmNodes = this.rm.getRMContext().getRMNodes().values();
boolean isInactive = false;

View File

@@ -30,6 +30,7 @@ import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
@@ -257,7 +258,8 @@ public class RMWebServices {
} else {
acceptedStates = EnumSet.noneOf(NodeState.class);
for (String stateStr : states.split(",")) {
acceptedStates.add(NodeState.valueOf(stateStr.toUpperCase()));
acceptedStates.add(NodeState.valueOf(
stateStr.toUpperCase(Locale.ENGLISH)));
}
}
@@ -506,7 +508,7 @@ public class RMWebServices {
// if no states, returns the counts of all RMAppStates
if (states.size() == 0) {
for (YarnApplicationState state : YarnApplicationState.values()) {
states.add(state.toString().toLowerCase());
states.add(state.toString().toLowerCase(Locale.ENGLISH));
}
}
// in case we extend to multiple applicationTypes in the future
@@ -518,8 +520,9 @@
ConcurrentMap<ApplicationId, RMApp> apps = rm.getRMContext().getRMApps();
for (RMApp rmapp : apps.values()) {
YarnApplicationState state = rmapp.createApplicationState();
String type = rmapp.getApplicationType().trim().toLowerCase();
if (states.contains(state.toString().toLowerCase())) {
String type =
rmapp.getApplicationType().trim().toLowerCase(Locale.ENGLISH);
if (states.contains(state.toString().toLowerCase(Locale.ENGLISH))) {
if (types.contains(ANY)) {
countApp(scoreboard, state, ANY);
} else if (types.contains(type)) {
@@ -554,7 +557,8 @@
if (isState) {
try {
// enum string is in uppercase
YarnApplicationState.valueOf(paramStr.trim().toUpperCase());
YarnApplicationState.valueOf(
paramStr.trim().toUpperCase(Locale.ENGLISH));
} catch (RuntimeException e) {
YarnApplicationState[] stateArray =
YarnApplicationState.values();
@@ -564,7 +568,7 @@
+ " specified. It should be one of " + allAppStates);
}
}
params.add(paramStr.trim().toLowerCase());
params.add(paramStr.trim().toLowerCase(Locale.ENGLISH));
}
}
}
@@ -582,7 +586,8 @@
for (String state : states) {
Map<String, Long> partScoreboard = new HashMap<String, Long>();
scoreboard.put(
YarnApplicationState.valueOf(state.toUpperCase()), partScoreboard);
YarnApplicationState.valueOf(
state.toUpperCase(Locale.ENGLISH)), partScoreboard);
// types is verified not to be empty
for (String type : types) {
partScoreboard.put(type, 0L);