HADOOP-11602. Fix toUpperCase/toLowerCase to use Locale.ENGLISH. (ozawa)
commit d1c6accb6f
parent b442aeec95
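The patch routes case conversion through Locale.ENGLISH (mostly via the new StringUtils.toLowerCase/toUpperCase/equalsIgnoreCase helpers added below in StringUtils.java) so behaviour no longer depends on the JVM's default locale. A minimal standalone sketch of the problem being fixed follows; the class name and the printed examples are illustrative only and are not part of this patch:

    import java.util.Locale;

    // Under the Turkish locale the letter "i" does not round-trip through
    // upper/lower case ("i" -> dotted capital I, "I" -> dotless small i),
    // so default-locale toUpperCase()/toLowerCase() silently breaks
    // case-insensitive keyword and option matching.
    public class LocaleCaseDemo {
      public static void main(String[] args) {
        Locale tr = new Locale("tr", "TR");
        System.out.println("title".toUpperCase(tr));             // TİTLE
        System.out.println("TITLE".toLowerCase(tr));             // tıtle
        System.out.println("title".toUpperCase(Locale.ENGLISH)); // TITLE
        System.out.println("TITLE".toLowerCase(Locale.ENGLISH)); // title
      }
    }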
@@ -21,6 +21,7 @@ import com.sun.javadoc.DocErrorReporter;
 
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Locale;
 
 class StabilityOptions {
   public static final String STABLE_OPTION = "-stable";
@@ -28,7 +29,7 @@ class StabilityOptions {
   public static final String UNSTABLE_OPTION = "-unstable";
 
   public static Integer optionLength(String option) {
-    String opt = option.toLowerCase();
+    String opt = option.toLowerCase(Locale.ENGLISH);
     if (opt.equals(UNSTABLE_OPTION)) return 1;
     if (opt.equals(EVOLVING_OPTION)) return 1;
     if (opt.equals(STABLE_OPTION)) return 1;
@@ -38,7 +39,7 @@ class StabilityOptions {
   public static void validOptions(String[][] options,
       DocErrorReporter reporter) {
     for (int i = 0; i < options.length; i++) {
-      String opt = options[i][0].toLowerCase();
+      String opt = options[i][0].toLowerCase(Locale.ENGLISH);
       if (opt.equals(UNSTABLE_OPTION)) {
         RootDocProcessor.stability = UNSTABLE_OPTION;
       } else if (opt.equals(EVOLVING_OPTION)) {
@@ -14,6 +14,7 @@
 package org.apache.hadoop.security.authentication.server;
 
 import java.io.IOException;
+import java.util.Locale;
 import java.util.Properties;
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
@@ -68,7 +69,8 @@ public abstract class AltKerberosAuthenticationHandler
         NON_BROWSER_USER_AGENTS, NON_BROWSER_USER_AGENTS_DEFAULT)
         .split("\\W*,\\W*");
     for (int i = 0; i < nonBrowserUserAgents.length; i++) {
-      nonBrowserUserAgents[i] = nonBrowserUserAgents[i].toLowerCase();
+      nonBrowserUserAgents[i] =
+          nonBrowserUserAgents[i].toLowerCase(Locale.ENGLISH);
     }
   }
 
@@ -120,7 +122,7 @@ public abstract class AltKerberosAuthenticationHandler
     if (userAgent == null) {
       return false;
     }
-    userAgent = userAgent.toLowerCase();
+    userAgent = userAgent.toLowerCase(Locale.ENGLISH);
     boolean isBrowser = true;
     for (String nonBrowserUserAgent : nonBrowserUserAgents) {
       if (userAgent.contains(nonBrowserUserAgent)) {
@@ -21,6 +21,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
+import java.util.Locale;
 import java.util.regex.Pattern;
 
 import org.apache.directory.server.kerberos.shared.keytab.Keytab;
@@ -58,24 +59,25 @@ public class TestKerberosUtil {
 
     // send null hostname
     Assert.assertEquals("When no hostname is sent",
-        service + "/" + localHostname.toLowerCase(),
+        service + "/" + localHostname.toLowerCase(Locale.ENGLISH),
         KerberosUtil.getServicePrincipal(service, null));
     // send empty hostname
     Assert.assertEquals("When empty hostname is sent",
-        service + "/" + localHostname.toLowerCase(),
+        service + "/" + localHostname.toLowerCase(Locale.ENGLISH),
         KerberosUtil.getServicePrincipal(service, ""));
     // send 0.0.0.0 hostname
     Assert.assertEquals("When 0.0.0.0 hostname is sent",
-        service + "/" + localHostname.toLowerCase(),
+        service + "/" + localHostname.toLowerCase(Locale.ENGLISH),
        KerberosUtil.getServicePrincipal(service, "0.0.0.0"));
     // send uppercase hostname
     Assert.assertEquals("When uppercase hostname is sent",
-        service + "/" + testHost.toLowerCase(),
+        service + "/" + testHost.toLowerCase(Locale.ENGLISH),
         KerberosUtil.getServicePrincipal(service, testHost));
     // send lowercase hostname
     Assert.assertEquals("When lowercase hostname is sent",
-        service + "/" + testHost.toLowerCase(),
-        KerberosUtil.getServicePrincipal(service, testHost.toLowerCase()));
+        service + "/" + testHost.toLowerCase(Locale.ENGLISH),
+        KerberosUtil.getServicePrincipal(
+            service, testHost.toLowerCase(Locale.ENGLISH)));
   }
 
   @Test
@@ -409,6 +409,8 @@ Trunk (Unreleased)
     HADOOP-10774. Update KerberosTestUtils for hadoop-auth tests when using
     IBM Java (sangamesh via aw)
 
+    HADOOP-11602. Fix toUpperCase/toLowerCase to use Locale.ENGLISH. (ozawa)
+
   OPTIMIZATIONS
 
     HADOOP-7761. Improve the performance of raw comparisons. (todd)
@@ -1451,11 +1451,9 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
       return defaultValue;
     }
 
-    valueString = valueString.toLowerCase();
-
-    if ("true".equals(valueString))
+    if (StringUtils.equalsIgnoreCase("true", valueString))
       return true;
-    else if ("false".equals(valueString))
+    else if (StringUtils.equalsIgnoreCase("false", valueString))
       return false;
     else return defaultValue;
   }
@@ -19,6 +19,7 @@
 package org.apache.hadoop.crypto;
 
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * Defines properties of a CipherSuite. Modeled after the ciphers in
@@ -97,7 +98,7 @@ public enum CipherSuite {
     String[] parts = name.split("/");
     StringBuilder suffix = new StringBuilder();
     for (String part : parts) {
-      suffix.append(".").append(part.toLowerCase());
+      suffix.append(".").append(StringUtils.toLowerCase(part));
     }
 
     return suffix.toString();
@@ -28,6 +28,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.security.ProviderUtils;
+import org.apache.hadoop.util.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -422,7 +423,7 @@ public class JavaKeyStoreProvider extends KeyProvider {
   @Override
   public KeyVersion createKey(String name, byte[] material,
       Options options) throws IOException {
-    Preconditions.checkArgument(name.equals(name.toLowerCase()),
+    Preconditions.checkArgument(name.equals(StringUtils.toLowerCase(name)),
         "Uppercase key names are unsupported: %s", name);
     writeLock.lock();
     try {
@@ -65,6 +65,7 @@ import org.apache.hadoop.util.DataChecksum;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.ShutdownHookManager;
+import org.apache.hadoop.util.StringUtils;
 
 import com.google.common.annotations.VisibleForTesting;
 
@@ -2795,8 +2796,10 @@ public abstract class FileSystem extends Configured implements Closeable {
     }
 
     Key(URI uri, Configuration conf, long unique) throws IOException {
-      scheme = uri.getScheme()==null?"":uri.getScheme().toLowerCase();
-      authority = uri.getAuthority()==null?"":uri.getAuthority().toLowerCase();
+      scheme = uri.getScheme()==null ?
+          "" : StringUtils.toLowerCase(uri.getScheme());
+      authority = uri.getAuthority()==null ?
+          "" : StringUtils.toLowerCase(uri.getAuthority());
       this.unique = unique;
 
       this.ugi = UserGroupInformation.getCurrentUser();
@@ -24,6 +24,7 @@ import java.util.List;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * Defines the types of supported storage media. The default storage
@@ -78,7 +79,7 @@ public enum StorageType {
   }
 
   public static StorageType parseStorageType(String s) {
-    return StorageType.valueOf(s.toUpperCase());
+    return StorageType.valueOf(StringUtils.toUpperCase(s));
   }
 
   private static List<StorageType> getNonTransientTypes() {
@@ -106,7 +106,7 @@ public class AclEntry {
       sb.append("default:");
     }
     if (type != null) {
-      sb.append(type.toString().toLowerCase());
+      sb.append(StringUtils.toLowerCase(type.toString()));
     }
     sb.append(':');
     if (name != null) {
@@ -263,7 +263,8 @@ public class AclEntry {
 
     AclEntryType aclType = null;
     try {
-      aclType = Enum.valueOf(AclEntryType.class, split[index].toUpperCase());
+      aclType = Enum.valueOf(
+          AclEntryType.class, StringUtils.toUpperCase(split[index]));
       builder.setType(aclType);
       index++;
     } catch (IllegalArgumentException iae) {
@@ -79,7 +79,7 @@ class XAttrCommands extends FsCommand {
     String en = StringUtils.popOptionWithArgument("-e", args);
     if (en != null) {
       try {
-        encoding = enValueOfFunc.apply(en.toUpperCase(Locale.ENGLISH));
+        encoding = enValueOfFunc.apply(StringUtils.toUpperCase(en));
       } catch (IllegalArgumentException e) {
         throw new IllegalArgumentException(
             "Invalid/unsupported encoding option specified: " + en);
@@ -22,6 +22,7 @@ import java.util.Deque;
 
 import org.apache.hadoop.fs.GlobPattern;
 import org.apache.hadoop.fs.shell.PathData;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * Implements the -name expression for the
@@ -73,7 +74,7 @@ final class Name extends BaseExpression {
   public void prepare() throws IOException {
     String argPattern = getArgument(1);
     if (!caseSensitive) {
-      argPattern = argPattern.toLowerCase();
+      argPattern = StringUtils.toLowerCase(argPattern);
     }
     globPattern = new GlobPattern(argPattern);
   }
@@ -82,7 +83,7 @@ final class Name extends BaseExpression {
   public Result apply(PathData item, int depth) throws IOException {
     String name = getPath(item).getName();
     if (!caseSensitive) {
-      name = name.toLowerCase();
+      name = StringUtils.toLowerCase(name);
     }
     if (globPattern.matches(name)) {
       return Result.PASS;
@@ -27,6 +27,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * A factory that will find the correct codec for a given filename.
@@ -66,10 +67,10 @@ public class CompressionCodecFactory {
       codecsByClassName.put(codec.getClass().getCanonicalName(), codec);
 
       String codecName = codec.getClass().getSimpleName();
-      codecsByName.put(codecName.toLowerCase(), codec);
+      codecsByName.put(StringUtils.toLowerCase(codecName), codec);
       if (codecName.endsWith("Codec")) {
         codecName = codecName.substring(0, codecName.length() - "Codec".length());
-        codecsByName.put(codecName.toLowerCase(), codec);
+        codecsByName.put(StringUtils.toLowerCase(codecName), codec);
       }
     }
 
@@ -246,7 +247,7 @@ public class CompressionCodecFactory {
     if (codec == null) {
       // trying to get the codec by name in case the name was specified
       // instead a class
-      codec = codecsByName.get(codecName.toLowerCase());
+      codec = codecsByName.get(StringUtils.toLowerCase(codecName));
     }
     return codec;
   }
@@ -44,6 +44,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.metrics2.MetricsFilter;
 import org.apache.hadoop.metrics2.MetricsPlugin;
 import org.apache.hadoop.metrics2.filter.GlobFilter;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * Metrics configuration for MetricsSystemImpl
@@ -85,12 +86,12 @@ class MetricsConfig extends SubsetConfiguration {
   private ClassLoader pluginLoader;
 
   MetricsConfig(Configuration c, String prefix) {
-    super(c, prefix.toLowerCase(Locale.US), ".");
+    super(c, StringUtils.toLowerCase(prefix), ".");
   }
 
   static MetricsConfig create(String prefix) {
-    return loadFirst(prefix, "hadoop-metrics2-"+ prefix.toLowerCase(Locale.US)
-        +".properties", DEFAULT_FILE_NAME);
+    return loadFirst(prefix, "hadoop-metrics2-" +
+        StringUtils.toLowerCase(prefix) + ".properties", DEFAULT_FILE_NAME);
   }
 
   static MetricsConfig create(String prefix, String... fileNames) {
@@ -61,6 +61,7 @@ import org.apache.hadoop.metrics2.lib.MetricsRegistry;
 import org.apache.hadoop.metrics2.lib.MetricsSourceBuilder;
 import org.apache.hadoop.metrics2.lib.MutableStat;
 import org.apache.hadoop.metrics2.util.MBeans;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Time;
 
 /**
@@ -616,7 +617,7 @@ public class MetricsSystemImpl extends MetricsSystem implements MetricsSource {
     LOG.debug("from environment variable: "+ System.getenv(MS_INIT_MODE_KEY));
     String m = System.getProperty(MS_INIT_MODE_KEY);
     String m2 = m == null ? System.getenv(MS_INIT_MODE_KEY) : m;
-    return InitMode.valueOf((m2 == null ? InitMode.NORMAL.name() : m2)
-        .toUpperCase(Locale.US));
+    return InitMode.valueOf(
+        StringUtils.toUpperCase((m2 == null ? InitMode.NORMAL.name() : m2)));
   }
 }
@@ -66,7 +66,8 @@ public class SaslPropertiesResolver implements Configurable{
         CommonConfigurationKeysPublic.HADOOP_RPC_PROTECTION,
         QualityOfProtection.AUTHENTICATION.toString());
     for (int i=0; i < qop.length; i++) {
-      qop[i] = QualityOfProtection.valueOf(qop[i].toUpperCase(Locale.ENGLISH)).getSaslQop();
+      qop[i] = QualityOfProtection.valueOf(
+          StringUtils.toUpperCase(qop[i])).getSaslQop();
     }
     properties.put(Sasl.QOP, StringUtils.join(",", qop));
     properties.put(Sasl.SERVER_AUTH, "true");
@@ -27,7 +27,6 @@ import java.security.PrivilegedAction;
 import java.security.PrivilegedExceptionAction;
 import java.util.Arrays;
 import java.util.List;
-import java.util.Locale;
 import java.util.ServiceLoader;
 
 import javax.security.auth.kerberos.KerberosPrincipal;
@@ -44,6 +43,7 @@ import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenInfo;
+import org.apache.hadoop.util.StringUtils;
 
 
 //this will need to be replaced someday when there is a suitable replacement
@@ -182,7 +182,8 @@ public class SecurityUtil {
     if (fqdn == null || fqdn.isEmpty() || fqdn.equals("0.0.0.0")) {
       fqdn = getLocalHostName();
     }
-    return components[0] + "/" + fqdn.toLowerCase(Locale.US) + "@" + components[2];
+    return components[0] + "/" +
+        StringUtils.toLowerCase(fqdn) + "@" + components[2];
   }
 
   static String getLocalHostName() throws UnknownHostException {
@@ -379,7 +380,7 @@ public class SecurityUtil {
       }
       host = addr.getAddress().getHostAddress();
     } else {
-      host = addr.getHostName().toLowerCase();
+      host = StringUtils.toLowerCase(addr.getHostName());
     }
     return new Text(host + ":" + addr.getPort());
   }
@@ -606,7 +607,8 @@ public class SecurityUtil {
   public static AuthenticationMethod getAuthenticationMethod(Configuration conf) {
     String value = conf.get(HADOOP_SECURITY_AUTHENTICATION, "simple");
     try {
-      return Enum.valueOf(AuthenticationMethod.class, value.toUpperCase(Locale.ENGLISH));
+      return Enum.valueOf(AuthenticationMethod.class,
+          StringUtils.toUpperCase(value));
     } catch (IllegalArgumentException iae) {
       throw new IllegalArgumentException("Invalid attribute value for " +
           HADOOP_SECURITY_AUTHENTICATION + " of " + value);
@@ -619,7 +621,7 @@ public class SecurityUtil {
       authenticationMethod = AuthenticationMethod.SIMPLE;
     }
     conf.set(HADOOP_SECURITY_AUTHENTICATION,
-        authenticationMethod.toString().toLowerCase(Locale.ENGLISH));
+        StringUtils.toLowerCase(authenticationMethod.toString()));
   }
 
   /*
@@ -138,7 +138,8 @@ public class WhitelistBasedResolver extends SaslPropertiesResolver {
         QualityOfProtection.PRIVACY.toString());
 
     for (int i=0; i < qop.length; i++) {
-      qop[i] = QualityOfProtection.valueOf(qop[i].toUpperCase()).getSaslQop();
+      qop[i] = QualityOfProtection.valueOf(
+          StringUtils.toUpperCase(qop[i])).getSaslQop();
     }
 
     saslProps.put(Sasl.QOP, StringUtils.join(",", qop));
@@ -23,6 +23,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.util.StringUtils;
 
 import javax.net.ssl.KeyManager;
 import javax.net.ssl.KeyManagerFactory;
@@ -94,7 +95,8 @@ public class FileBasedKeyStoresFactory implements KeyStoresFactory {
   @VisibleForTesting
   public static String resolvePropertyName(SSLFactory.Mode mode,
       String template) {
-    return MessageFormat.format(template, mode.toString().toLowerCase());
+    return MessageFormat.format(
+        template, StringUtils.toLowerCase(mode.toString()));
   }
 
   /**
@@ -22,6 +22,7 @@ import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.authentication.client.ConnectionConfigurator;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hadoop.util.StringUtils;
 import static org.apache.hadoop.util.PlatformName.IBM_JAVA;
 
 import javax.net.ssl.HostnameVerifier;
@@ -137,8 +138,8 @@ public class SSLFactory implements ConnectionConfigurator {
 
   private HostnameVerifier getHostnameVerifier(Configuration conf)
       throws GeneralSecurityException, IOException {
-    return getHostnameVerifier(conf.get(SSL_HOSTNAME_VERIFIER_KEY, "DEFAULT").
-        trim().toUpperCase());
+    return getHostnameVerifier(StringUtils.toUpperCase(
+        conf.get(SSL_HOSTNAME_VERIFIER_KEY, "DEFAULT").trim()));
   }
 
   public static HostnameVerifier getHostnameVerifier(String verifier)
@@ -52,6 +52,7 @@ import javax.net.ssl.SSLSocket;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  ************************************************************************
@@ -365,7 +366,7 @@ public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier {
         buf.append('<');
         for (int i = 0; i < hosts.length; i++) {
             String h = hosts[i];
-            h = h != null ? h.trim().toLowerCase() : "";
+            h = h != null ? StringUtils.toLowerCase(h.trim()) : "";
             hosts[i] = h;
             if (i > 0) {
                 buf.append('/');
@@ -406,7 +407,7 @@ public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier {
         out:
         for (Iterator<String> it = names.iterator(); it.hasNext();) {
            // Don't trim the CN, though!
-           final String cn = it.next().toLowerCase();
+           final String cn = StringUtils.toLowerCase(it.next());
            // Store CN in StringBuffer in case we need to report an error.
            buf.append(" <");
            buf.append(cn);
@@ -424,7 +425,8 @@ public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier {
                acceptableCountryWildcard(cn);
 
            for (int i = 0; i < hosts.length; i++) {
-               final String hostName = hosts[i].trim().toLowerCase();
+               final String hostName =
+                   StringUtils.toLowerCase(hosts[i].trim());
                if (doWildcard) {
                    match = hostName.endsWith(cn.substring(1));
                    if (match && strictWithSubDomains) {
@@ -479,7 +481,7 @@ public interface SSLHostnameVerifier extends javax.net.ssl.HostnameVerifier {
        }
 
        public static boolean isLocalhost(String host) {
-           host = host != null ? host.trim().toLowerCase() : "";
+           host = host != null ? StringUtils.toLowerCase(host.trim()) : "";
           if (host.startsWith("::1")) {
               int x = host.lastIndexOf('%');
               if (x >= 0) {
@@ -47,6 +47,7 @@ import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager;
 import org.apache.hadoop.util.HttpExceptionUtils;
+import org.apache.hadoop.util.StringUtils;
 import org.codehaus.jackson.map.ObjectMapper;
 
 import com.google.common.annotations.VisibleForTesting;
@@ -169,7 +170,7 @@ public abstract class DelegationTokenAuthenticationHandler
     boolean requestContinues = true;
     String op = ServletUtils.getParameter(request,
         KerberosDelegationTokenAuthenticator.OP_PARAM);
-    op = (op != null) ? op.toUpperCase() : null;
+    op = (op != null) ? StringUtils.toUpperCase(op) : null;
     if (DELEGATION_TOKEN_OPS.contains(op) &&
         !request.getMethod().equals("OPTIONS")) {
       KerberosDelegationTokenAuthenticator.DelegationTokenOperation dtOp =
@@ -27,6 +27,7 @@ import org.apache.hadoop.security.authentication.client.ConnectionConfigurator;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
 import org.apache.hadoop.util.HttpExceptionUtils;
+import org.apache.hadoop.util.StringUtils;
 import org.codehaus.jackson.map.ObjectMapper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -286,7 +287,7 @@ public abstract class DelegationTokenAuthenticator implements Authenticator {
     HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
     if (hasResponse) {
       String contentType = conn.getHeaderField(CONTENT_TYPE);
-      contentType = (contentType != null) ? contentType.toLowerCase()
+      contentType = (contentType != null) ? StringUtils.toLowerCase(contentType)
           : null;
       if (contentType != null &&
           contentType.contains(APPLICATION_JSON_MIME)) {
@@ -37,7 +37,6 @@ import java.util.Arrays;
 import java.util.Iterator;
 import java.util.List;
 import java.util.ListIterator;
-import java.util.Locale;
 import java.util.Properties;
 import java.util.Stack;
 
@@ -363,7 +362,7 @@ public class ComparableVersion
 
         items = new ListItem();
 
-        version = version.toLowerCase( Locale.ENGLISH );
+        version = StringUtils.toLowerCase(version);
 
         ListItem list = items;
 
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.util;
 
+import com.google.common.base.Preconditions;
 import java.io.PrintWriter;
 import java.io.StringWriter;
 import java.net.URI;
@@ -901,7 +902,7 @@ public class StringUtils {
    */
   public static String camelize(String s) {
     StringBuilder sb = new StringBuilder();
-    String[] words = split(s.toLowerCase(Locale.US), ESCAPE_CHAR,  '_');
+    String[] words = split(StringUtils.toLowerCase(s), ESCAPE_CHAR, '_');
 
     for (String word : words)
       sb.append(org.apache.commons.lang.StringUtils.capitalize(word));
@@ -1032,4 +1033,41 @@ public class StringUtils {
     }
     return null;
   }
+
+  /**
+   * Converts all of the characters in this String to lower case with
+   * Locale.ENGLISH.
+   *
+   * @param str  string to be converted
+   * @return     the str, converted to lowercase.
+   */
+  public static String toLowerCase(String str) {
+    return str.toLowerCase(Locale.ENGLISH);
+  }
+
+  /**
+   * Converts all of the characters in this String to upper case with
+   * Locale.ENGLISH.
+   *
+   * @param str  string to be converted
+   * @return     the str, converted to uppercase.
+   */
+  public static String toUpperCase(String str) {
+    return str.toUpperCase(Locale.ENGLISH);
+  }
+
+  /**
+   * Compare strings locale-freely by using String#equalsIgnoreCase.
+   *
+   * @param s1  Non-null string to be converted
+   * @param s2  string to be converted
+   * @return     the str, converted to uppercase.
+   */
+  public static boolean equalsIgnoreCase(String s1, String s2) {
+    Preconditions.checkNotNull(s1);
+    // don't check non-null against s2 to make the semantics same as
+    // s1.equals(s2)
+    return s1.equalsIgnoreCase(s2);
+  }
+
 }
@@ -20,7 +20,6 @@ package org.apache.hadoop.fs;
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
-import java.util.Locale;
 
 import junit.framework.TestCase;
 
@@ -28,6 +27,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * <p>
@@ -527,7 +527,7 @@ public abstract class FileSystemContractBaseTest extends TestCase {
     }
     String mixedCaseFilename = "/test/UPPER.TXT";
     Path upper = path(mixedCaseFilename);
-    Path lower = path(mixedCaseFilename.toLowerCase(Locale.ENGLISH));
+    Path lower = path(StringUtils.toLowerCase(mixedCaseFilename));
     assertFalse("File exists" + upper, fs.exists(upper));
     assertFalse("File exists" + lower, fs.exists(lower));
     FSDataOutputStream out = fs.create(upper);
@@ -1296,7 +1296,7 @@ public class TestIPC {
 
     StringBuilder hexString = new StringBuilder();
 
-    for (String line : hexdump.toUpperCase().split("\n")) {
+    for (String line : StringUtils.toUpperCase(hexdump).split("\n")) {
       hexString.append(line.substring(0, LAST_HEX_COL).replace(" ", ""));
     }
     return StringUtils.hexStringToByte(hexString.toString());
@@ -181,7 +181,7 @@ public class TestSaslRPC {
     StringBuilder sb = new StringBuilder();
     int i = 0;
     for (QualityOfProtection qop:qops){
-      sb.append(qop.name().toLowerCase());
+      sb.append(org.apache.hadoop.util.StringUtils.toLowerCase(qop.name()));
       if (++i < qops.length){
         sb.append(",");
       }
@@ -18,13 +18,13 @@ package org.apache.hadoop.security;
 
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION;
 import static org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod.*;
+
 import static org.junit.Assert.*;
 
 import java.io.IOException;
 import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.net.URI;
-import java.util.Locale;
 
 import javax.security.auth.kerberos.KerberosPrincipal;
 
@@ -33,6 +33,7 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.hadoop.util.StringUtils;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.mockito.Mockito;
@@ -103,13 +104,14 @@ public class TestSecurityUtil {
     String realm = "@REALM";
     String principalInConf = service + SecurityUtil.HOSTNAME_PATTERN + realm;
     String hostname = "FooHost";
-    String principal = service + hostname.toLowerCase() + realm;
+    String principal =
+        service + StringUtils.toLowerCase(hostname) + realm;
     verify(principalInConf, hostname, principal);
   }
 
   @Test
   public void testLocalHostNameForNullOrWild() throws Exception {
-    String local = SecurityUtil.getLocalHostName().toLowerCase(Locale.US);
+    String local = StringUtils.toLowerCase(SecurityUtil.getLocalHostName());
     assertEquals("hdfs/" + local + "@REALM",
         SecurityUtil.getServerPrincipal("hdfs/_HOST@REALM", (String)null));
     assertEquals("hdfs/" + local + "@REALM",
@@ -260,7 +262,7 @@ public class TestSecurityUtil {
     //LOG.info("address:"+addr+" host:"+host+" ip:"+ip+" port:"+port);
 
     SecurityUtil.setTokenServiceUseIp(useIp);
-    String serviceHost = useIp ? ip : host.toLowerCase();
+    String serviceHost = useIp ? ip : StringUtils.toLowerCase(host);
 
     Token<?> token = new Token<TokenIdentifier>();
     Text service = new Text(serviceHost+":"+port);
@@ -26,6 +26,7 @@ import org.apache.hadoop.security.authentication.util.KerberosName;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.util.Shell;
+import org.apache.hadoop.util.StringUtils;
 import org.junit.*;
 
 import javax.security.auth.Subject;
@@ -213,7 +214,7 @@ public class TestUserGroupInformation {
         userName = userName.substring(sp + 1);
       }
       // user names are case insensitive on Windows. Make consistent
-      userName = userName.toLowerCase();
+      userName = StringUtils.toLowerCase(userName);
     }
     // get the groups
     pp = Runtime.getRuntime().exec(Shell.WINDOWS ?
@@ -233,7 +234,7 @@ public class TestUserGroupInformation {
     String loginUserName = login.getShortUserName();
     if(Shell.WINDOWS) {
       // user names are case insensitive on Windows. Make consistent
-      loginUserName = loginUserName.toLowerCase();
+      loginUserName = StringUtils.toLowerCase(loginUserName);
     }
     assertEquals(userName, loginUserName);
 
@@ -29,6 +29,7 @@ import java.text.SimpleDateFormat;
 import java.util.Date;
 import java.util.Map;
 
+import org.apache.hadoop.util.StringUtils;
 import org.junit.runner.notification.Failure;
 import org.junit.runner.notification.RunListener;
 
@@ -93,8 +94,9 @@ public class TimedOutTestsListener extends RunListener {
         thread.getPriority(),
         thread.getId(),
-        Thread.State.WAITING.equals(thread.getState()) ?
-            "in Object.wait()" : thread.getState().name().toLowerCase(),
+        Thread.State.WAITING.equals(thread.getState()) ?
+            "in Object.wait()" :
+            StringUtils.toLowerCase(thread.getState().name()),
         Thread.State.WAITING.equals(thread.getState()) ?
             "WAITING (on object monitor)" : thread.getState()));
     for (StackTraceElement stackTraceElement : e.getValue()) {
       dump.append("\n at ");
@@ -18,10 +18,12 @@
 
 package org.apache.hadoop.util;
 
+import java.util.Locale;
 import static org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix.long2String;
 import static org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix.string2long;
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
@@ -412,6 +414,25 @@ public class TestStringUtils extends UnitTestcaseTimeLimit {
     assertTrue(col.containsAll(Arrays.asList(new String[]{"foo","bar","baz","blah"})));
   }
 
+  @Test
+  public void testLowerAndUpperStrings() {
+    Locale defaultLocale = Locale.getDefault();
+    try {
+      Locale.setDefault(new Locale("tr", "TR"));
+      String upperStr = "TITLE";
+      String lowerStr = "title";
+      // Confirming TR locale.
+      assertNotEquals(lowerStr, upperStr.toLowerCase());
+      assertNotEquals(upperStr, lowerStr.toUpperCase());
+      // This should be true regardless of locale.
+      assertEquals(lowerStr, StringUtils.toLowerCase(upperStr));
+      assertEquals(upperStr, StringUtils.toUpperCase(lowerStr));
+      assertTrue(StringUtils.equalsIgnoreCase(upperStr, lowerStr));
+    } finally {
+      Locale.setDefault(defaultLocale);
+    }
+  }
+
   // Benchmark for StringUtils split
   public static void main(String []args) {
     final String TO_SPLIT = "foo,bar,baz,blah,blah";
@@ -382,8 +382,10 @@ public class TestWinUtils {
   private void assertOwners(File file, String expectedUser,
       String expectedGroup) throws IOException {
     String [] args = lsF(file).trim().split("[\\|]");
-    assertEquals(expectedUser.toLowerCase(), args[2].toLowerCase());
-    assertEquals(expectedGroup.toLowerCase(), args[3].toLowerCase());
+    assertEquals(StringUtils.toLowerCase(expectedUser),
+        StringUtils.toLowerCase(args[2]));
+    assertEquals(StringUtils.toLowerCase(expectedGroup),
+        StringUtils.toLowerCase(args[3]));
   }
 
   @Test (timeout = 30000)
@@ -32,6 +32,7 @@ import org.apache.hadoop.nfs.nfs3.Nfs3Constant;
 import org.apache.hadoop.util.LightWeightCache;
 import org.apache.hadoop.util.LightWeightGSet;
 import org.apache.hadoop.util.LightWeightGSet.LinkedElement;
+import org.apache.hadoop.util.StringUtils;
 
 import com.google.common.base.Preconditions;
 
@@ -359,10 +360,10 @@ public class NfsExports {
     AccessPrivilege privilege = AccessPrivilege.READ_ONLY;
     switch (parts.length) {
     case 1:
-      host = parts[0].toLowerCase().trim();
+      host = StringUtils.toLowerCase(parts[0]).trim();
       break;
     case 2:
-      host = parts[0].toLowerCase().trim();
+      host = StringUtils.toLowerCase(parts[0]).trim();
       String option = parts[1].trim();
       if ("rw".equalsIgnoreCase(option)) {
         privilege = AccessPrivilege.READ_WRITE;
@@ -21,6 +21,7 @@ package org.apache.hadoop.fs.http.server;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
+import org.apache.hadoop.util.StringUtils;
 
 import javax.servlet.Filter;
 import javax.servlet.FilterChain;
@@ -82,7 +83,8 @@ public class CheckUploadContentTypeFilter implements Filter {
     String method = httpReq.getMethod();
     if (method.equals("PUT") || method.equals("POST")) {
       String op = httpReq.getParameter(HttpFSFileSystem.OP_PARAM);
-      if (op != null && UPLOAD_OPERATIONS.contains(op.toUpperCase())) {
+      if (op != null && UPLOAD_OPERATIONS.contains(
+          StringUtils.toUpperCase(op))) {
         if ("true".equalsIgnoreCase(httpReq.getParameter(HttpFSParametersProvider.DataParam.NAME))) {
           String contentType = httpReq.getContentType();
           contentTypeOK =
@@ -34,6 +34,7 @@ import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.protocol.AclException;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.lib.service.FileSystemAccess;
+import org.apache.hadoop.util.StringUtils;
 import org.json.simple.JSONArray;
 import org.json.simple.JSONObject;
 
@@ -439,7 +440,8 @@ public class FSOperations {
     @Override
     public JSONObject execute(FileSystem fs) throws IOException {
       boolean result = fs.truncate(path, newLength);
-      return toJSON(HttpFSFileSystem.TRUNCATE_JSON.toLowerCase(), result);
+      return toJSON(
+          StringUtils.toLowerCase(HttpFSFileSystem.TRUNCATE_JSON), result);
     }
 
   }
@@ -568,7 +570,8 @@ public class FSOperations {
     @Override
     public JSONObject execute(FileSystem fs) throws IOException {
      boolean deleted = fs.delete(path, recursive);
-      return toJSON(HttpFSFileSystem.DELETE_JSON.toLowerCase(), deleted);
+      return toJSON(
+          StringUtils.toLowerCase(HttpFSFileSystem.DELETE_JSON), deleted);
     }
 
   }
@@ -30,6 +30,7 @@ import org.apache.hadoop.lib.wsrs.Param;
 import org.apache.hadoop.lib.wsrs.ParametersProvider;
 import org.apache.hadoop.lib.wsrs.ShortParam;
 import org.apache.hadoop.lib.wsrs.StringParam;
+import org.apache.hadoop.util.StringUtils;
 
 import javax.ws.rs.ext.Provider;
 import java.util.HashMap;
@@ -168,7 +169,8 @@ public class HttpFSParametersProvider extends ParametersProvider {
      */
     public OperationParam(String operation) {
       super(NAME, HttpFSFileSystem.Operation.class,
-          HttpFSFileSystem.Operation.valueOf(operation.toUpperCase()));
+          HttpFSFileSystem.Operation.valueOf(
+              StringUtils.toUpperCase(operation)));
     }
   }
 
@@ -22,6 +22,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.lib.util.Check;
 import org.apache.hadoop.lib.util.ConfigurationUtils;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.PropertyConfigurator;
 import org.slf4j.Logger;
@@ -202,7 +203,7 @@ public class Server {
    * @param config server configuration.
    */
   public Server(String name, String homeDir, String configDir, String logDir, String tempDir, Configuration config) {
-    this.name = Check.notEmpty(name, "name").trim().toLowerCase();
+    this.name = StringUtils.toLowerCase(Check.notEmpty(name, "name").trim());
     this.homeDir = Check.notEmpty(homeDir, "homeDir");
     this.configDir = Check.notEmpty(configDir, "configDir");
     this.logDir = Check.notEmpty(logDir, "logDir");
@@ -33,6 +33,7 @@ import org.apache.hadoop.lib.service.Scheduler;
 import org.apache.hadoop.lib.util.Check;
 import org.apache.hadoop.lib.util.ConfigurationUtils;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.VersionInfo;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -254,7 +255,7 @@ public class FileSystemAccessService extends BaseService implements FileSystemAc
   private Set<String> toLowerCase(Collection<String> collection) {
     Set<String> set = new HashSet<String>();
     for (String value : collection) {
-      set.add(value.toLowerCase());
+      set.add(StringUtils.toLowerCase(value));
     }
     return set;
   }
@@ -300,7 +301,8 @@ public class FileSystemAccessService extends BaseService implements FileSystemAc
 
   protected void validateNamenode(String namenode) throws FileSystemAccessException {
     if (nameNodeWhitelist.size() > 0 && !nameNodeWhitelist.contains("*")) {
-      if (!nameNodeWhitelist.contains(namenode.toLowerCase())) {
+      if (!nameNodeWhitelist.contains(
+          StringUtils.toLowerCase(namenode))) {
        throw new FileSystemAccessException(FileSystemAccessException.ERROR.H05, namenode, "not in whitelist");
      }
    }
@@ -34,7 +34,7 @@ public abstract class EnumParam<E extends Enum<E>> extends Param<E> {
 
   @Override
   protected E parse(String str) throws Exception {
-    return Enum.valueOf(klass, str.toUpperCase());
+    return Enum.valueOf(klass, StringUtils.toUpperCase(str));
   }
 
   @Override
@@ -22,6 +22,7 @@ import java.util.EnumSet;
 import java.util.Iterator;
 
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.util.StringUtils;
 
 @InterfaceAudience.Private
 public abstract class EnumSetParam<E extends Enum<E>> extends Param<EnumSet<E>> {
@@ -37,7 +38,7 @@ public abstract class EnumSetParam<E extends Enum<E>> extends Param<EnumSet<E>>
     final EnumSet<E> set = EnumSet.noneOf(klass);
     if (!str.isEmpty()) {
       for (String sub : str.split(",")) {
-        set.add(Enum.valueOf(klass, sub.trim().toUpperCase()));
+        set.add(Enum.valueOf(klass, StringUtils.toUpperCase(sub.trim())));
       }
     }
     return set;
@@ -26,6 +26,7 @@ import com.sun.jersey.server.impl.inject.AbstractHttpContextInjectable;
 import com.sun.jersey.spi.inject.Injectable;
 import com.sun.jersey.spi.inject.InjectableProvider;
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.util.StringUtils;
 
 import javax.ws.rs.core.Context;
 import javax.ws.rs.core.MultivaluedMap;
@@ -70,7 +71,7 @@ public class ParametersProvider
     }
     Enum op;
     try {
-      op = Enum.valueOf(enumClass, str.toUpperCase());
+      op = Enum.valueOf(enumClass, StringUtils.toUpperCase(str));
     } catch (IllegalArgumentException ex) {
       throw new IllegalArgumentException(
           MessageFormat.format("Invalid Operation [{0}]", str));
@@ -24,6 +24,7 @@ import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.XAttr;
 import org.apache.hadoop.fs.XAttr.NameSpace;
+import org.apache.hadoop.util.StringUtils;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
@@ -57,16 +58,20 @@ public class XAttrHelper {
     }
 
     NameSpace ns;
-    final String prefix = name.substring(0, prefixIndex).toLowerCase();
-    if (prefix.equals(NameSpace.USER.toString().toLowerCase())) {
+    final String prefix = name.substring(0, prefixIndex);
+    if (StringUtils.equalsIgnoreCase(prefix, NameSpace.USER.toString())) {
       ns = NameSpace.USER;
-    } else if (prefix.equals(NameSpace.TRUSTED.toString().toLowerCase())) {
+    } else if (
+        StringUtils.equalsIgnoreCase(prefix, NameSpace.TRUSTED.toString())) {
      ns = NameSpace.TRUSTED;
-    } else if (prefix.equals(NameSpace.SYSTEM.toString().toLowerCase())) {
+    } else if (
+        StringUtils.equalsIgnoreCase(prefix, NameSpace.SYSTEM.toString())) {
      ns = NameSpace.SYSTEM;
-    } else if (prefix.equals(NameSpace.SECURITY.toString().toLowerCase())) {
+    } else if (
+        StringUtils.equalsIgnoreCase(prefix, NameSpace.SECURITY.toString())) {
      ns = NameSpace.SECURITY;
-    } else if (prefix.equals(NameSpace.RAW.toString().toLowerCase())) {
+    } else if (
+        StringUtils.equalsIgnoreCase(prefix, NameSpace.RAW.toString())) {
      ns = NameSpace.RAW;
    } else {
      throw new HadoopIllegalArgumentException("An XAttr name must be " +
@@ -145,7 +150,7 @@ public class XAttrHelper {
     }
 
     String namespace = xAttr.getNameSpace().toString();
-    return namespace.toLowerCase() + "." + xAttr.getName();
+    return StringUtils.toLowerCase(namespace) + "." + xAttr.getName();
   }
 
   /**
@@ -28,6 +28,7 @@ import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.server.datanode.DataNodeLayoutVersion;
 import org.apache.hadoop.hdfs.server.namenode.NameNodeLayoutVersion;
 import org.apache.hadoop.hdfs.server.namenode.FSDirectory;
+import org.apache.hadoop.util.StringUtils;
 
 /************************************
  * Some handy constants
@@ -98,7 +99,7 @@ public class HdfsConstants {
 
     /** Covert the given String to a RollingUpgradeAction. */
     public static RollingUpgradeAction fromString(String s) {
-      return MAP.get(s.toUpperCase());
+      return MAP.get(StringUtils.toUpperCase(s));
     }
   }
 
@@ -26,6 +26,7 @@ import org.apache.hadoop.fs.XAttr;
 import org.apache.hadoop.hdfs.XAttrHelper;
 import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
+import org.apache.hadoop.util.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -131,7 +132,8 @@ public class BlockStoragePolicySuite {
   }
 
   public static String buildXAttrName() {
-    return XAttrNS.toString().toLowerCase() + "." + STORAGE_POLICY_XATTR_NAME;
+    return StringUtils.toLowerCase(XAttrNS.toString())
+        + "." + STORAGE_POLICY_XATTR_NAME;
   }
 
   public static XAttr buildXAttr(byte policyId) {
@@ -27,6 +27,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.server.namenode.MetaRecoveryContext;
 
 import com.google.common.base.Preconditions;
+import org.apache.hadoop.util.StringUtils;
 
 /************************************
  * Some handy internal HDFS constants
@@ -53,7 +54,7 @@ public final class HdfsServerConstants {
 
     public String getOptionString() {
       return StartupOption.ROLLINGUPGRADE.getName() + " "
-          + name().toLowerCase();
+          + StringUtils.toLowerCase(name());
     }
 
     public boolean matches(StartupOption option) {
@@ -84,7 +85,7 @@ public final class HdfsServerConstants {
     public static String getAllOptionString() {
       final StringBuilder b = new StringBuilder("<");
       for(RollingUpgradeStartupOption opt : VALUES) {
-        b.append(opt.name().toLowerCase()).append("|");
+        b.append(StringUtils.toLowerCase(opt.name())).append("|");
       }
       b.setCharAt(b.length() - 1, '>');
       return b.toString();
@@ -28,6 +28,7 @@ import java.util.regex.Matcher;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.StorageType;
 import org.apache.hadoop.hdfs.server.common.Util;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * Encapsulates the URI and storage medium that together describe a
@@ -88,7 +89,8 @@ public class StorageLocation {
       String classString = matcher.group(1);
       location = matcher.group(2);
       if (!classString.isEmpty()) {
-        storageType = StorageType.valueOf(classString.toUpperCase());
+        storageType =
+            StorageType.valueOf(StringUtils.toUpperCase(classString));
       }
     }
 
@@ -123,6 +123,7 @@ import org.apache.hadoop.ipc.ClientId;
 import org.apache.hadoop.ipc.RpcConstants;
 import org.apache.hadoop.security.token.delegation.DelegationKey;
 import org.apache.hadoop.util.DataChecksum;
+import org.apache.hadoop.util.StringUtils;
 import org.xml.sax.ContentHandler;
 import org.xml.sax.SAXException;
 import org.xml.sax.helpers.AttributesImpl;
@@ -4348,7 +4349,7 @@ public abstract class FSEditLogOp {
 
     public RollingUpgradeOp(FSEditLogOpCodes code, String name) {
       super(code);
-      this.name = name.toUpperCase();
+      this.name = StringUtils.toUpperCase(name);
     }
 
     static RollingUpgradeOp getStartInstance(OpInstanceCache cache) {
@@ -19,6 +19,7 @@ package org.apache.hadoop.hdfs.server.namenode;
 
 import com.google.common.base.Objects;
 import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.util.StringUtils;
 
 public class QuotaByStorageTypeEntry {
   private StorageType type;
@@ -53,7 +54,7 @@ public class QuotaByStorageTypeEntry {
   public String toString() {
     StringBuilder sb = new StringBuilder();
     assert (type != null);
-    sb.append(type.toString().toLowerCase());
+    sb.append(StringUtils.toLowerCase(type.toString()));
     sb.append(':');
     sb.append(quota);
     return sb.toString();
@@ -587,7 +587,7 @@ public class SecondaryNameNode implements Runnable,
       return 0;
     }
 
-    String cmd = opts.getCommand().toString().toLowerCase();
+    String cmd = StringUtils.toLowerCase(opts.getCommand().toString());
 
     int exitCode = 0;
     try {
@@ -34,6 +34,7 @@ import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.DFSUtil.ConfiguredNNAddress;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
@@ -79,19 +80,19 @@ public class GetConf extends Configured implements Tool {
     private static final Map<String, CommandHandler> map;
     static {
       map = new HashMap<String, CommandHandler>();
-      map.put(NAMENODE.getName().toLowerCase(),
+      map.put(StringUtils.toLowerCase(NAMENODE.getName()),
           new NameNodesCommandHandler());
-      map.put(SECONDARY.getName().toLowerCase(),
+      map.put(StringUtils.toLowerCase(SECONDARY.getName()),
          new SecondaryNameNodesCommandHandler());
-      map.put(BACKUP.getName().toLowerCase(),
+      map.put(StringUtils.toLowerCase(BACKUP.getName()),
          new BackupNodesCommandHandler());
-      map.put(INCLUDE_FILE.getName().toLowerCase(),
+      map.put(StringUtils.toLowerCase(INCLUDE_FILE.getName()),
          new CommandHandler(DFSConfigKeys.DFS_HOSTS));
-      map.put(EXCLUDE_FILE.getName().toLowerCase(),
+      map.put(StringUtils.toLowerCase(EXCLUDE_FILE.getName()),
          new CommandHandler(DFSConfigKeys.DFS_HOSTS_EXCLUDE));
-      map.put(NNRPCADDRESSES.getName().toLowerCase(),
+      map.put(StringUtils.toLowerCase(NNRPCADDRESSES.getName()),
          new NNRpcAddressesCommandHandler());
-      map.put(CONFKEY.getName().toLowerCase(),
+      map.put(StringUtils.toLowerCase(CONFKEY.getName()),
          new PrintConfKeyCommandHandler());
     }
 
@@ -116,7 +117,7 @@ public class GetConf extends Configured implements Tool {
     }
 
     public static CommandHandler getHandler(String cmd) {
-      return map.get(cmd.toLowerCase());
+      return map.get(StringUtils.toLowerCase(cmd));
     }
   }
 
@@ -24,6 +24,7 @@ import java.io.OutputStream;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * EditsVisitorFactory for different implementations of EditsVisitor
@@ -43,7 +44,7 @@ public class OfflineEditsVisitorFactory {
    */
  static public OfflineEditsVisitor getEditsVisitor(String filename,
    String processor, boolean printToScreen) throws IOException {
-    if(processor.toLowerCase().equals("binary")) {
+    if(StringUtils.equalsIgnoreCase("binary", processor)) {
      return new BinaryEditsVisitor(filename);
    }
    OfflineEditsVisitor vis;
@@ -59,9 +60,9 @@ public class OfflineEditsVisitorFactory {
      outs[1] = System.out;
      out = new TeeOutputStream(outs);
    }
-    if(processor.toLowerCase().equals("xml")) {
+    if(StringUtils.equalsIgnoreCase("xml", processor)) {
      vis = new XmlEditsVisitor(out);
-    } else if(processor.toLowerCase().equals("stats")) {
+    } else if(StringUtils.equalsIgnoreCase("stats", processor)) {
      vis = new StatisticsEditsVisitor(out);
    } else {
      throw new IOException("Unknown proccesor " + processor +
@@ -33,6 +33,7 @@ import io.netty.handler.codec.http.QueryStringDecoder;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hdfs.web.JsonUtil;
+import org.apache.hadoop.util.StringUtils;
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
@@ -51,6 +52,7 @@ import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1;
 import static org.apache.hadoop.hdfs.server.datanode.web.webhdfs.WebHdfsHandler.APPLICATION_JSON_UTF8;
 import static org.apache.hadoop.hdfs.server.datanode.web.webhdfs.WebHdfsHandler.WEBHDFS_PREFIX;
 import static org.apache.hadoop.hdfs.server.datanode.web.webhdfs.WebHdfsHandler.WEBHDFS_PREFIX_LENGTH;
+
 /**
  * Implement the read-only WebHDFS API for fsimage.
  */
@@ -141,7 +143,7 @@ class FSImageHandler extends SimpleChannelInboundHandler<HttpRequest> {
   private static String getOp(QueryStringDecoder decoder) {
     Map<String, List<String>> parameters = decoder.parameters();
     return parameters.containsKey("op")
-        ? parameters.get("op").get(0).toUpperCase() : null;
+        ? StringUtils.toUpperCase(parameters.get("op").get(0)) : null;
   }
 
   private static String getPath(QueryStringDecoder decoder)
@@ -39,6 +39,7 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
 import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
 import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * Subclass of {@link AuthenticationFilter} that
@@ -96,7 +97,7 @@ public class AuthFilter extends AuthenticationFilter {
 
     final Map<String, List<String>> m = new HashMap<String, List<String>>();
     for(Map.Entry<String, String[]> entry : original.entrySet()) {
-      final String key = entry.getKey().toLowerCase();
+      final String key = StringUtils.toLowerCase(entry.getKey());
       List<String> strings = m.get(key);
       if (strings == null) {
         strings = new ArrayList<String>();
@@ -28,6 +28,7 @@ import com.sun.jersey.spi.container.ContainerRequest;
 import com.sun.jersey.spi.container.ContainerRequestFilter;
 import com.sun.jersey.spi.container.ContainerResponseFilter;
 import com.sun.jersey.spi.container.ResourceFilter;
+import org.apache.hadoop.util.StringUtils;
 
 /**
  * A filter to change parameter names to lower cases
@@ -75,7 +76,7 @@ public class ParamFilter implements ResourceFilter {
       final MultivaluedMap<String, String> parameters) {
     UriBuilder b = UriBuilder.fromUri(uri).replaceQuery("");
     for(Map.Entry<String, List<String>> e : parameters.entrySet()) {
-      final String key = e.getKey().toLowerCase();
+      final String key = StringUtils.toLowerCase(e.getKey());
       for(String v : e.getValue()) {
         b = b.queryParam(key, v);
       }
@@ -80,6 +80,7 @@ import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.security.token.TokenSelector;
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSelector;
 import org.apache.hadoop.util.Progressable;
+import org.apache.hadoop.util.StringUtils;
 import org.mortbay.util.ajax.JSON;
 
 import com.google.common.annotations.VisibleForTesting;
@@ -1242,7 +1243,7 @@ public class WebHdfsFileSystem extends FileSystem
     if (query == null) {
       return url;
     }
-    final String lower = query.toLowerCase();
+    final String lower = StringUtils.toLowerCase(query);
     if (!lower.startsWith(OFFSET_PARAM_PREFIX)
         && !lower.contains("&" + OFFSET_PARAM_PREFIX)) {
       return url;
@@ -1253,7 +1254,7 @@ public class WebHdfsFileSystem extends FileSystem
     for(final StringTokenizer st = new StringTokenizer(query, "&");
         st.hasMoreTokens();) {
       final String token = st.nextToken();
-      if (!token.toLowerCase().startsWith(OFFSET_PARAM_PREFIX)) {
+      if (!StringUtils.toLowerCase(token).startsWith(OFFSET_PARAM_PREFIX)) {
        if (b == null) {
          b = new StringBuilder("?").append(token);
        } else {
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hdfs.web.resources;
 
 import java.util.Arrays;
+import org.apache.hadoop.util.StringUtils;
 
 abstract class EnumParam<E extends Enum<E>> extends Param<E, EnumParam.Domain<E>> {
   EnumParam(final Domain<E> domain, final E value) {
@@ -40,7 +41,7 @@ abstract class EnumParam<E extends Enum<E>> extends Param<E, EnumParam.Domain<E>
 
     @Override
     final E parse(final String str) {
-      return Enum.valueOf(enumClass, str.toUpperCase());
+      return Enum.valueOf(enumClass, StringUtils.toUpperCase(str));
     }
   }
 }
@ -20,6 +20,7 @@ package org.apache.hadoop.hdfs.web.resources;
|
|||
import java.util.Arrays;
|
||||
import java.util.EnumSet;
|
||||
import java.util.Iterator;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
|
||||
abstract class EnumSetParam<E extends Enum<E>> extends Param<EnumSet<E>, EnumSetParam.Domain<E>> {
|
||||
/** Convert an EnumSet to a string of comma separated values. */
|
||||
|
@ -82,7 +83,7 @@ abstract class EnumSetParam<E extends Enum<E>> extends Param<EnumSet<E>, EnumSet
|
|||
i = j > 0 ? j + 1 : 0;
|
||||
j = str.indexOf(',', i);
|
||||
final String sub = j >= 0? str.substring(i, j): str.substring(i);
|
||||
set.add(Enum.valueOf(enumClass, sub.trim().toUpperCase()));
|
||||
set.add(Enum.valueOf(enumClass, StringUtils.toUpperCase(sub.trim())));
|
||||
}
|
||||
}
|
||||
return set;
|
||||
|
|
|
@ -19,7 +19,6 @@
|
|||
package org.apache.hadoop.hdfs.server.namenode.snapshot;
|
||||
|
||||
import static org.mockito.Matchers.anyObject;
|
||||
import static org.mockito.Matchers.anyString;
|
||||
import static org.mockito.Mockito.doReturn;
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.spy;
|
||||
|
@ -31,6 +30,7 @@ import org.apache.hadoop.hdfs.server.namenode.FSDirectory;
|
|||
import org.apache.hadoop.hdfs.server.namenode.INode;
|
||||
import org.apache.hadoop.hdfs.server.namenode.INodeDirectory;
|
||||
import org.apache.hadoop.hdfs.server.namenode.INodesInPath;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
import org.junit.Assert;
|
||||
import org.junit.Test;
|
||||
|
||||
|
@ -70,7 +70,7 @@ public class TestSnapshotManager {
|
|||
Assert.fail("Expected SnapshotException not thrown");
|
||||
} catch (SnapshotException se) {
|
||||
Assert.assertTrue(
|
||||
se.getMessage().toLowerCase().contains("rollover"));
|
||||
StringUtils.toLowerCase(se.getMessage()).contains("rollover"));
|
||||
}
|
||||
|
||||
// Delete a snapshot to free up a slot.
|
||||
|
@ -86,7 +86,7 @@ public class TestSnapshotManager {
|
|||
Assert.fail("Expected SnapshotException not thrown");
|
||||
} catch (SnapshotException se) {
|
||||
Assert.assertTrue(
|
||||
se.getMessage().toLowerCase().contains("rollover"));
|
||||
StringUtils.toLowerCase(se.getMessage()).contains("rollover"));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -59,6 +59,7 @@ import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils;
|
|||
import org.apache.hadoop.mapreduce.v2.jobhistory.JobIndexInfo;
|
||||
import org.apache.hadoop.security.UserGroupInformation;
|
||||
import org.apache.hadoop.service.AbstractService;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
|
||||
import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
|
||||
import org.apache.hadoop.yarn.client.api.TimelineClient;
|
||||
|
@ -711,7 +712,7 @@ public class JobHistoryEventHandler extends AbstractService
|
|||
private void processEventForTimelineServer(HistoryEvent event, JobId jobId,
|
||||
long timestamp) {
|
||||
TimelineEvent tEvent = new TimelineEvent();
|
||||
tEvent.setEventType(event.getEventType().name().toUpperCase());
|
||||
tEvent.setEventType(StringUtils.toUpperCase(event.getEventType().name()));
|
||||
tEvent.setTimestamp(timestamp);
|
||||
TimelineEntity tEntity = new TimelineEntity();
|
||||
|
||||
|
|
|
@ -22,7 +22,6 @@ import static org.apache.hadoop.yarn.util.StringHelper.join;
|
|||
|
||||
import java.io.IOException;
|
||||
import java.net.URLDecoder;
|
||||
import java.util.Locale;
|
||||
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
|
||||
|
@ -226,8 +225,9 @@ public class AppController extends Controller implements AMParams {
|
|||
if (app.getJob() != null) {
|
||||
try {
|
||||
String tt = $(TASK_TYPE);
|
||||
tt = tt.isEmpty() ? "All" : StringUtils.capitalize(MRApps.taskType(tt).
|
||||
toString().toLowerCase(Locale.US));
|
||||
tt = tt.isEmpty() ? "All" : StringUtils.capitalize(
|
||||
org.apache.hadoop.util.StringUtils.toLowerCase(
|
||||
MRApps.taskType(tt).toString()));
|
||||
setTitle(join(tt, " Tasks for ", $(JOB_ID)));
|
||||
} catch (Exception e) {
|
||||
LOG.error("Failed to render tasks page with task type : "
|
||||
|
|
|
@ -41,6 +41,7 @@ import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
|
|||
import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
|
||||
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
|
||||
import org.apache.hadoop.mapreduce.v2.util.MRApps;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationId;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationReport;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport;
|
||||
|
@ -314,7 +315,7 @@ public class TypeConverter {
|
|||
QueueState state) {
|
||||
org.apache.hadoop.mapreduce.QueueState qState =
|
||||
org.apache.hadoop.mapreduce.QueueState.getState(
|
||||
state.toString().toLowerCase());
|
||||
StringUtils.toLowerCase(state.toString()));
|
||||
return qState;
|
||||
}
|
||||
|
||||
|
|
|
@ -303,7 +303,7 @@ public class MRApps extends Apps {
|
|||
remoteFS.getWorkingDirectory()));
|
||||
String name = (null == u.getFragment())
|
||||
? p.getName() : u.getFragment();
|
||||
if (!name.toLowerCase().endsWith(".jar")) {
|
||||
if (!StringUtils.toLowerCase(name).endsWith(".jar")) {
|
||||
linkLookup.put(p, name);
|
||||
}
|
||||
}
|
||||
|
@ -317,7 +317,7 @@ public class MRApps extends Apps {
|
|||
if (name == null) {
|
||||
name = p.getName();
|
||||
}
|
||||
if(!name.toLowerCase().endsWith(".jar")) {
|
||||
if(!StringUtils.toLowerCase(name).endsWith(".jar")) {
|
||||
MRApps.addToEnvironment(
|
||||
environment,
|
||||
classpathEnvVar,
|
||||
|
|
|
@ -17,6 +17,7 @@
|
|||
*/
|
||||
package org.apache.hadoop.mapreduce;
|
||||
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
|
@ -151,9 +152,10 @@ public class TestTypeConverter {
|
|||
.newRecord(org.apache.hadoop.yarn.api.records.QueueInfo.class);
|
||||
queueInfo.setQueueState(org.apache.hadoop.yarn.api.records.QueueState.STOPPED);
|
||||
org.apache.hadoop.mapreduce.QueueInfo returned =
|
||||
TypeConverter.fromYarn(queueInfo, new Configuration());
|
||||
TypeConverter.fromYarn(queueInfo, new Configuration());
|
||||
Assert.assertEquals("queueInfo translation didn't work.",
|
||||
returned.getState().toString(), queueInfo.getQueueState().toString().toLowerCase());
|
||||
returned.getState().toString(),
|
||||
StringUtils.toLowerCase(queueInfo.getQueueState().toString()));
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -116,7 +116,7 @@ abstract public class Task implements Writable, Configurable {
|
|||
* BYTES_READ counter and second one is of the BYTES_WRITTEN counter.
|
||||
*/
|
||||
protected static String[] getFileSystemCounterNames(String uriScheme) {
|
||||
String scheme = uriScheme.toUpperCase();
|
||||
String scheme = StringUtils.toUpperCase(uriScheme);
|
||||
return new String[]{scheme+"_BYTES_READ", scheme+"_BYTES_WRITTEN"};
|
||||
}
|
||||
|
||||
|
|
|
@ -25,7 +25,6 @@ import java.util.Arrays;
|
|||
import java.util.concurrent.ConcurrentMap;
|
||||
import java.util.concurrent.ConcurrentSkipListMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
|
||||
import com.google.common.base.Joiner;
|
||||
|
@ -42,6 +41,7 @@ import org.apache.hadoop.io.WritableUtils;
|
|||
import org.apache.hadoop.mapreduce.Counter;
|
||||
import org.apache.hadoop.mapreduce.FileSystemCounter;
|
||||
import org.apache.hadoop.mapreduce.util.ResourceBundles;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
|
||||
/**
|
||||
* An abstract class to provide common implementation of the filesystem
|
||||
|
@ -227,7 +227,7 @@ public abstract class FileSystemCounterGroup<C extends Counter>
|
|||
}
|
||||
|
||||
private String checkScheme(String scheme) {
|
||||
String fixed = scheme.toUpperCase(Locale.US);
|
||||
String fixed = StringUtils.toUpperCase(scheme);
|
||||
String interned = schemes.putIfAbsent(fixed, fixed);
|
||||
if (schemes.size() > MAX_NUM_SCHEMES) {
|
||||
// mistakes or abuses
|
||||
|
|
|
@ -473,7 +473,7 @@ public class DistributedCache {
|
|||
if (fragment == null) {
|
||||
return false;
|
||||
}
|
||||
String lowerCaseFragment = fragment.toLowerCase();
|
||||
String lowerCaseFragment = StringUtils.toLowerCase(fragment);
|
||||
if (fragments.contains(lowerCaseFragment)) {
|
||||
return false;
|
||||
}
|
||||
|
@ -488,7 +488,7 @@ public class DistributedCache {
|
|||
if (fragment == null) {
|
||||
return false;
|
||||
}
|
||||
String lowerCaseFragment = fragment.toLowerCase();
|
||||
String lowerCaseFragment = StringUtils.toLowerCase(fragment);
|
||||
if (fragments.contains(lowerCaseFragment)) {
|
||||
return false;
|
||||
}
|
||||
|
|
|
@ -45,6 +45,8 @@ import org.apache.hadoop.mapreduce.JobContext;
|
|||
import org.apache.hadoop.mapreduce.MRJobConfig;
|
||||
import org.apache.hadoop.mapreduce.RecordReader;
|
||||
import org.apache.hadoop.mapreduce.TaskAttemptContext;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
|
||||
/**
|
||||
* A InputFormat that reads input data from an SQL table.
|
||||
* <p>
|
||||
|
@ -162,7 +164,8 @@ public class DBInputFormat<T extends DBWritable>
|
|||
this.connection = createConnection();
|
||||
|
||||
DatabaseMetaData dbMeta = connection.getMetaData();
|
||||
this.dbProductName = dbMeta.getDatabaseProductName().toUpperCase();
|
||||
this.dbProductName =
|
||||
StringUtils.toUpperCase(dbMeta.getDatabaseProductName());
|
||||
}
|
||||
catch (Exception ex) {
|
||||
throw new RuntimeException(ex);
|
||||
|
|
|
@ -222,12 +222,14 @@ public class CLI extends Configured implements Tool {
|
|||
taskType = argv[2];
|
||||
taskState = argv[3];
|
||||
displayTasks = true;
|
||||
if (!taskTypes.contains(taskType.toUpperCase())) {
|
||||
if (!taskTypes.contains(
|
||||
org.apache.hadoop.util.StringUtils.toUpperCase(taskType))) {
|
||||
System.out.println("Error: Invalid task-type: " + taskType);
|
||||
displayUsage(cmd);
|
||||
return exitCode;
|
||||
}
|
||||
if (!taskStates.contains(taskState.toLowerCase())) {
|
||||
if (!taskStates.contains(
|
||||
org.apache.hadoop.util.StringUtils.toLowerCase(taskState))) {
|
||||
System.out.println("Error: Invalid task-state: " + taskState);
|
||||
displayUsage(cmd);
|
||||
return exitCode;
|
||||
|
@ -593,7 +595,8 @@ public class CLI extends Configured implements Tool {
|
|||
throws IOException, InterruptedException {
|
||||
|
||||
TaskReport[] reports=null;
|
||||
reports = job.getTaskReports(TaskType.valueOf(type.toUpperCase()));
|
||||
reports = job.getTaskReports(TaskType.valueOf(
|
||||
org.apache.hadoop.util.StringUtils.toUpperCase(type)));
|
||||
for (TaskReport report : reports) {
|
||||
TIPStatus status = report.getCurrentStatus();
|
||||
if ((state.equalsIgnoreCase("pending") && status ==TIPStatus.PENDING) ||
|
||||
|
|
|
@ -154,16 +154,16 @@ public class TestDFSIO implements Tool {
|
|||
static ByteMultiple parseString(String sMultiple) {
|
||||
if(sMultiple == null || sMultiple.isEmpty()) // MB by default
|
||||
return MB;
|
||||
String sMU = sMultiple.toUpperCase();
|
||||
if(B.name().toUpperCase().endsWith(sMU))
|
||||
String sMU = StringUtils.toUpperCase(sMultiple);
|
||||
if(StringUtils.toUpperCase(B.name()).endsWith(sMU))
|
||||
return B;
|
||||
if(KB.name().toUpperCase().endsWith(sMU))
|
||||
if(StringUtils.toUpperCase(KB.name()).endsWith(sMU))
|
||||
return KB;
|
||||
if(MB.name().toUpperCase().endsWith(sMU))
|
||||
if(StringUtils.toUpperCase(MB.name()).endsWith(sMU))
|
||||
return MB;
|
||||
if(GB.name().toUpperCase().endsWith(sMU))
|
||||
if(StringUtils.toUpperCase(GB.name()).endsWith(sMU))
|
||||
return GB;
|
||||
if(TB.name().toUpperCase().endsWith(sMU))
|
||||
if(StringUtils.toUpperCase(TB.name()).endsWith(sMU))
|
||||
return TB;
|
||||
throw new IllegalArgumentException("Unsupported ByteMultiple "+sMultiple);
|
||||
}
|
||||
|
@ -736,7 +736,7 @@ public class TestDFSIO implements Tool {
|
|||
}
|
||||
|
||||
for (int i = 0; i < args.length; i++) { // parse command line
|
||||
if (args[i].toLowerCase().startsWith("-read")) {
|
||||
if (StringUtils.toLowerCase(args[i]).startsWith("-read")) {
|
||||
testType = TestType.TEST_TYPE_READ;
|
||||
} else if (args[i].equalsIgnoreCase("-write")) {
|
||||
testType = TestType.TEST_TYPE_WRITE;
|
||||
|
@ -755,9 +755,9 @@ public class TestDFSIO implements Tool {
|
|||
testType = TestType.TEST_TYPE_TRUNCATE;
|
||||
} else if (args[i].equalsIgnoreCase("-clean")) {
|
||||
testType = TestType.TEST_TYPE_CLEANUP;
|
||||
} else if (args[i].toLowerCase().startsWith("-seq")) {
|
||||
} else if (StringUtils.toLowerCase(args[i]).startsWith("-seq")) {
|
||||
isSequential = true;
|
||||
} else if (args[i].toLowerCase().startsWith("-compression")) {
|
||||
} else if (StringUtils.toLowerCase(args[i]).startsWith("-compression")) {
|
||||
compressionClass = args[++i];
|
||||
} else if (args[i].equalsIgnoreCase("-nrfiles")) {
|
||||
nrFiles = Integer.parseInt(args[++i]);
|
||||
|
|
|
@ -49,6 +49,7 @@ import org.apache.hadoop.io.SequenceFile.CompressionType;
|
|||
import org.apache.hadoop.mapred.*;
|
||||
import org.apache.hadoop.mapred.lib.LongSumReducer;
|
||||
import org.apache.hadoop.security.UserGroupInformation;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
|
||||
public class TestFileSystem extends TestCase {
|
||||
private static final Log LOG = FileSystem.LOG;
|
||||
|
@ -556,7 +557,8 @@ public class TestFileSystem extends TestCase {
|
|||
static void checkPath(MiniDFSCluster cluster, FileSystem fileSys) throws IOException {
|
||||
InetSocketAddress add = cluster.getNameNode().getNameNodeAddress();
|
||||
// Test upper/lower case
|
||||
fileSys.checkPath(new Path("hdfs://" + add.getHostName().toUpperCase() + ":" + add.getPort()));
|
||||
fileSys.checkPath(new Path("hdfs://"
|
||||
+ StringUtils.toUpperCase(add.getHostName()) + ":" + add.getPort()));
|
||||
}
|
||||
|
||||
public void testFsClose() throws Exception {
|
||||
|
|
|
@ -18,6 +18,8 @@
|
|||
|
||||
package org.apache.hadoop.fs.slive;
|
||||
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
|
||||
/**
|
||||
* Constants used in various places in slive
|
||||
*/
|
||||
|
@ -35,7 +37,7 @@ class Constants {
|
|||
enum Distribution {
|
||||
BEG, END, UNIFORM, MID;
|
||||
String lowerName() {
|
||||
return this.name().toLowerCase();
|
||||
return StringUtils.toLowerCase(this.name());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -45,7 +47,7 @@ class Constants {
|
|||
enum OperationType {
|
||||
READ, APPEND, RENAME, LS, MKDIR, DELETE, CREATE, TRUNCATE;
|
||||
String lowerName() {
|
||||
return this.name().toLowerCase();
|
||||
return StringUtils.toLowerCase(this.name());
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -19,6 +19,7 @@
|
|||
package org.apache.hadoop.fs.slive;
|
||||
|
||||
import org.apache.hadoop.fs.slive.Constants.Distribution;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
|
||||
/**
|
||||
* This class holds the data representing what an operations distribution and
|
||||
|
@ -52,7 +53,7 @@ class OperationData {
|
|||
percent = (Double.parseDouble(pieces[0]) / 100.0d);
|
||||
} else if (pieces.length >= 2) {
|
||||
percent = (Double.parseDouble(pieces[0]) / 100.0d);
|
||||
distribution = Distribution.valueOf(pieces[1].toUpperCase());
|
||||
distribution = Distribution.valueOf(StringUtils.toUpperCase(pieces[1]));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -19,6 +19,7 @@
|
|||
package org.apache.hadoop.fs.slive;
|
||||
|
||||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
|
||||
/**
|
||||
* An operation output has the following object format whereby simple types are
|
||||
|
@ -67,7 +68,8 @@ class OperationOutput {
|
|||
"Invalid key format - no type seperator - " + TYPE_SEP);
|
||||
}
|
||||
try {
|
||||
dataType = OutputType.valueOf(key.substring(0, place).toUpperCase());
|
||||
dataType = OutputType.valueOf(
|
||||
StringUtils.toUpperCase(key.substring(0, place)));
|
||||
} catch (Exception e) {
|
||||
throw new IllegalArgumentException(
|
||||
"Invalid key format - invalid output type", e);
|
||||
|
|
|
@ -42,6 +42,7 @@ import org.apache.hadoop.mapred.FileOutputFormat;
|
|||
import org.apache.hadoop.mapred.JobClient;
|
||||
import org.apache.hadoop.mapred.JobConf;
|
||||
import org.apache.hadoop.mapred.TextOutputFormat;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
import org.apache.hadoop.util.Tool;
|
||||
import org.apache.hadoop.util.ToolRunner;
|
||||
|
||||
|
@ -157,7 +158,7 @@ public class SliveTest implements Tool {
|
|||
if (val == null) {
|
||||
return false;
|
||||
}
|
||||
String cleanupOpt = val.toLowerCase().trim();
|
||||
String cleanupOpt = StringUtils.toLowerCase(val).trim();
|
||||
if (cleanupOpt.equals("true") || cleanupOpt.equals("1")) {
|
||||
return true;
|
||||
} else {
|
||||
|
|
|
@ -35,6 +35,7 @@ import org.apache.hadoop.io.compress.CompressionCodec;
|
|||
import org.apache.hadoop.io.compress.GzipCodec;
|
||||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.mapred.*;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
import org.apache.hadoop.util.Tool;
|
||||
import org.apache.hadoop.util.ToolRunner;
|
||||
|
||||
|
@ -214,23 +215,25 @@ public class FileBench extends Configured implements Tool {
|
|||
if (!(fmt == Format.txt || cod == CCodec.pln)) {
|
||||
for (CType typ : ct) {
|
||||
String fn =
|
||||
fmt.name().toUpperCase() + "_" +
|
||||
cod.name().toUpperCase() + "_" +
|
||||
typ.name().toUpperCase();
|
||||
StringUtils.toUpperCase(fmt.name()) + "_" +
|
||||
StringUtils.toUpperCase(cod.name()) + "_" +
|
||||
StringUtils.toUpperCase(typ.name());
|
||||
typ.configure(job);
|
||||
System.out.print(rwop.name().toUpperCase() + " " + fn + ": ");
|
||||
System.out.print(
|
||||
StringUtils.toUpperCase(rwop.name()) + " " + fn + ": ");
|
||||
System.out.println(rwop.exec(fn, job) / 1000 +
|
||||
" seconds");
|
||||
}
|
||||
} else {
|
||||
String fn =
|
||||
fmt.name().toUpperCase() + "_" +
|
||||
cod.name().toUpperCase();
|
||||
StringUtils.toUpperCase(fmt.name()) + "_" +
|
||||
StringUtils.toUpperCase(cod.name());
|
||||
Path p = new Path(root, fn);
|
||||
if (rwop == RW.r && !fs.exists(p)) {
|
||||
fn += cod.getExt();
|
||||
}
|
||||
System.out.print(rwop.name().toUpperCase() + " " + fn + ": ");
|
||||
System.out.print(
|
||||
StringUtils.toUpperCase(rwop.name()) + " " + fn + ": ");
|
||||
System.out.println(rwop.exec(fn, job) / 1000 +
|
||||
" seconds");
|
||||
}
|
||||
|
|
|
@ -45,6 +45,7 @@ import org.apache.hadoop.io.SequenceFile.CompressionType;
|
|||
import org.apache.hadoop.mapred.lib.IdentityMapper;
|
||||
import org.apache.hadoop.mapred.lib.IdentityReducer;
|
||||
import org.apache.hadoop.mapreduce.MRConfig;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
import org.apache.hadoop.util.Tool;
|
||||
import org.apache.hadoop.util.ToolRunner;
|
||||
import org.junit.After;
|
||||
|
@ -280,7 +281,7 @@ public class TestMapRed extends Configured implements Tool {
|
|||
public void map(WritableComparable key, Text value,
|
||||
OutputCollector<Text, Text> output,
|
||||
Reporter reporter) throws IOException {
|
||||
String str = value.toString().toLowerCase();
|
||||
String str = StringUtils.toLowerCase(value.toString());
|
||||
output.collect(new Text(str), value);
|
||||
}
|
||||
|
||||
|
|
|
@ -102,7 +102,7 @@ public class DBCountPageView extends Configured implements Tool {
|
|||
|
||||
private void createConnection(String driverClassName
|
||||
, String url) throws Exception {
|
||||
if(driverClassName.toLowerCase().contains("oracle")) {
|
||||
if(StringUtils.toLowerCase(driverClassName).contains("oracle")) {
|
||||
isOracle = true;
|
||||
}
|
||||
Class.forName(driverClassName);
|
||||
|
|
|
@ -13,6 +13,7 @@
|
|||
*/
|
||||
package org.apache.hadoop.maven.plugin.versioninfo;
|
||||
|
||||
import java.util.Locale;
|
||||
import org.apache.hadoop.maven.plugin.util.Exec;
|
||||
import org.apache.hadoop.maven.plugin.util.FileSetUtils;
|
||||
import org.apache.maven.model.FileSet;
|
||||
|
@ -329,7 +330,8 @@ public class VersionInfoMojo extends AbstractMojo {
|
|||
}
|
||||
|
||||
private String normalizePath(File file) {
|
||||
return file.getPath().toUpperCase().replaceAll("\\\\", "/");
|
||||
return file.getPath().toUpperCase(Locale.ENGLISH)
|
||||
.replaceAll("\\\\", "/");
|
||||
}
|
||||
});
|
||||
byte[] md5 = computeMD5(files);
|
||||
|
|
|
@ -984,8 +984,8 @@ public class AzureNativeFileSystemStore implements NativeFileSystemStore {
|
|||
private String verifyAndConvertToStandardFormat(String rawDir) throws URISyntaxException {
|
||||
URI asUri = new URI(rawDir);
|
||||
if (asUri.getAuthority() == null
|
||||
|| asUri.getAuthority().toLowerCase(Locale.US).equalsIgnoreCase(
|
||||
sessionUri.getAuthority().toLowerCase(Locale.US))) {
|
||||
|| asUri.getAuthority().toLowerCase(Locale.ENGLISH).equalsIgnoreCase(
|
||||
sessionUri.getAuthority().toLowerCase(Locale.ENGLISH))) {
|
||||
// Applies to me.
|
||||
return trim(asUri.getPath(), "/");
|
||||
} else {
|
||||
|
|
|
@ -51,6 +51,7 @@ import org.apache.hadoop.tools.DistCpOptions.FileAttribute;
|
|||
import org.apache.hadoop.tools.mapred.UniformSizeInputFormat;
|
||||
|
||||
import com.google.common.collect.Maps;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
|
||||
/**
|
||||
* Utility functions used in DistCp.
|
||||
|
@ -121,8 +122,9 @@ public class DistCpUtils {
|
|||
*/
|
||||
public static Class<? extends InputFormat> getStrategy(Configuration conf,
|
||||
DistCpOptions options) {
|
||||
String confLabel = "distcp." +
|
||||
options.getCopyStrategy().toLowerCase(Locale.getDefault()) + ".strategy.impl";
|
||||
String confLabel = "distcp."
|
||||
+ StringUtils.toLowerCase(options.getCopyStrategy())
|
||||
+ ".strategy" + ".impl";
|
||||
return conf.getClass(confLabel, UniformSizeInputFormat.class, InputFormat.class);
|
||||
}
|
||||
|
||||
|
@ -221,7 +223,8 @@ public class DistCpUtils {
|
|||
|
||||
final boolean preserveXAttrs = attributes.contains(FileAttribute.XATTR);
|
||||
if (preserveXAttrs || preserveRawXattrs) {
|
||||
final String rawNS = XAttr.NameSpace.RAW.name().toLowerCase();
|
||||
final String rawNS =
|
||||
StringUtils.toLowerCase(XAttr.NameSpace.RAW.name());
|
||||
Map<String, byte[]> srcXAttrs = srcFileStatus.getXAttrs();
|
||||
Map<String, byte[]> targetXAttrs = getXAttrs(targetFS, path);
|
||||
if (srcXAttrs != null && !srcXAttrs.equals(targetXAttrs)) {
|
||||
|
@ -321,7 +324,8 @@ public class DistCpUtils {
|
|||
copyListingFileStatus.setXAttrs(srcXAttrs);
|
||||
} else {
|
||||
Map<String, byte[]> trgXAttrs = Maps.newHashMap();
|
||||
final String rawNS = XAttr.NameSpace.RAW.name().toLowerCase();
|
||||
final String rawNS =
|
||||
StringUtils.toLowerCase(XAttr.NameSpace.RAW.name());
|
||||
for (Map.Entry<String, byte[]> ent : srcXAttrs.entrySet()) {
|
||||
final String xattrName = ent.getKey();
|
||||
if (xattrName.startsWith(rawNS)) {
|
||||
|
|
|
@ -169,7 +169,9 @@ public class DistCpV1 implements Tool {
|
|||
|
||||
final char symbol;
|
||||
|
||||
private FileAttribute() {symbol = toString().toLowerCase().charAt(0);}
|
||||
private FileAttribute() {
|
||||
symbol = StringUtils.toLowerCase(toString()).charAt(0);
|
||||
}
|
||||
|
||||
static EnumSet<FileAttribute> parse(String s) {
|
||||
if (s == null || s.length() == 0) {
|
||||
|
|
|
@ -25,6 +25,7 @@ import org.apache.hadoop.mapred.gridmix.Statistics.ClusterStats;
|
|||
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.io.IOException;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
|
||||
enum GridmixJobSubmissionPolicy {
|
||||
|
||||
|
@ -84,6 +85,6 @@ enum GridmixJobSubmissionPolicy {
|
|||
public static GridmixJobSubmissionPolicy getPolicy(
|
||||
Configuration conf, GridmixJobSubmissionPolicy defaultPolicy) {
|
||||
String policy = conf.get(JOB_SUBMISSION_POLICY, defaultPolicy.name());
|
||||
return valueOf(policy.toUpperCase());
|
||||
return valueOf(StringUtils.toUpperCase(policy));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -27,12 +27,12 @@ import org.apache.hadoop.fs.swift.http.RestClientBindings;
|
|||
import org.apache.hadoop.fs.swift.snative.SwiftNativeFileSystem;
|
||||
import org.apache.hadoop.fs.swift.util.SwiftTestUtils;
|
||||
import org.apache.hadoop.io.IOUtils;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
import java.net.URI;
|
||||
import java.util.Locale;
|
||||
|
||||
public class TestSwiftFileSystemExtendedContract extends SwiftFileSystemBaseTest {
|
||||
|
||||
|
@ -115,7 +115,7 @@ public class TestSwiftFileSystemExtendedContract extends SwiftFileSystemBaseTest
|
|||
public void testFilesystemIsCaseSensitive() throws Exception {
|
||||
String mixedCaseFilename = "/test/UPPER.TXT";
|
||||
Path upper = path(mixedCaseFilename);
|
||||
Path lower = path(mixedCaseFilename.toLowerCase(Locale.ENGLISH));
|
||||
Path lower = path(StringUtils.toLowerCase(mixedCaseFilename));
|
||||
assertFalse("File exists" + upper, fs.exists(upper));
|
||||
assertFalse("File exists" + lower, fs.exists(lower));
|
||||
FSDataOutputStream out = fs.create(upper);
|
||||
|
|
|
@ -38,6 +38,7 @@ import java.util.regex.Pattern;
|
|||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
import org.apache.hadoop.util.Tool;
|
||||
import org.apache.hadoop.util.ToolRunner;
|
||||
import org.apache.hadoop.util.LineReader;
|
||||
|
@ -319,42 +320,42 @@ public class HadoopLogsAnalyzer extends Configured implements Tool {
|
|||
}
|
||||
|
||||
for (int i = 0; i < args.length - (inputFilename == null ? 0 : 1); ++i) {
|
||||
if ("-h".equals(args[i].toLowerCase())
|
||||
|| "-help".equals(args[i].toLowerCase())) {
|
||||
if (StringUtils.equalsIgnoreCase("-h", args[i])
|
||||
|| StringUtils.equalsIgnoreCase("-help", args[i])) {
|
||||
usage();
|
||||
return 0;
|
||||
}
|
||||
|
||||
if ("-c".equals(args[i].toLowerCase())
|
||||
|| "-collect-prefixes".equals(args[i].toLowerCase())) {
|
||||
if (StringUtils.equalsIgnoreCase("-c", args[i])
|
||||
|| StringUtils.equalsIgnoreCase("-collect-prefixes", args[i])) {
|
||||
collecting = true;
|
||||
continue;
|
||||
}
|
||||
|
||||
// these control the job digest
|
||||
if ("-write-job-trace".equals(args[i].toLowerCase())) {
|
||||
if (StringUtils.equalsIgnoreCase("-write-job-trace", args[i])) {
|
||||
++i;
|
||||
jobTraceFilename = new Path(args[i]);
|
||||
continue;
|
||||
}
|
||||
|
||||
if ("-single-line-job-traces".equals(args[i].toLowerCase())) {
|
||||
if (StringUtils.equalsIgnoreCase("-single-line-job-traces", args[i])) {
|
||||
prettyprintTrace = false;
|
||||
continue;
|
||||
}
|
||||
|
||||
if ("-omit-task-details".equals(args[i].toLowerCase())) {
|
||||
if (StringUtils.equalsIgnoreCase("-omit-task-details", args[i])) {
|
||||
omitTaskDetails = true;
|
||||
continue;
|
||||
}
|
||||
|
||||
if ("-write-topology".equals(args[i].toLowerCase())) {
|
||||
if (StringUtils.equalsIgnoreCase("-write-topology", args[i])) {
|
||||
++i;
|
||||
topologyFilename = new Path(args[i]);
|
||||
continue;
|
||||
}
|
||||
|
||||
if ("-job-digest-spectra".equals(args[i].toLowerCase())) {
|
||||
if (StringUtils.equalsIgnoreCase("-job-digest-spectra", args[i])) {
|
||||
ArrayList<Integer> values = new ArrayList<Integer>();
|
||||
|
||||
++i;
|
||||
|
@ -384,13 +385,13 @@ public class HadoopLogsAnalyzer extends Configured implements Tool {
|
|||
continue;
|
||||
}
|
||||
|
||||
if ("-d".equals(args[i].toLowerCase())
|
||||
|| "-debug".equals(args[i].toLowerCase())) {
|
||||
if (StringUtils.equalsIgnoreCase("-d", args[i])
|
||||
|| StringUtils.equalsIgnoreCase("-debug", args[i])) {
|
||||
debug = true;
|
||||
continue;
|
||||
}
|
||||
|
||||
if ("-spreads".equals(args[i].toLowerCase())) {
|
||||
if (StringUtils.equalsIgnoreCase("-spreads", args[i])) {
|
||||
int min = Integer.parseInt(args[i + 1]);
|
||||
int max = Integer.parseInt(args[i + 2]);
|
||||
|
||||
|
@ -404,22 +405,22 @@ public class HadoopLogsAnalyzer extends Configured implements Tool {
|
|||
}
|
||||
|
||||
// These control log-wide CDF outputs
|
||||
if ("-delays".equals(args[i].toLowerCase())) {
|
||||
if (StringUtils.equalsIgnoreCase("-delays", args[i])) {
|
||||
delays = true;
|
||||
continue;
|
||||
}
|
||||
|
||||
if ("-runtimes".equals(args[i].toLowerCase())) {
|
||||
if (StringUtils.equalsIgnoreCase("-runtimes", args[i])) {
|
||||
runtimes = true;
|
||||
continue;
|
||||
}
|
||||
|
||||
if ("-tasktimes".equals(args[i].toLowerCase())) {
|
||||
if (StringUtils.equalsIgnoreCase("-tasktimes", args[i])) {
|
||||
collectTaskTimes = true;
|
||||
continue;
|
||||
}
|
||||
|
||||
if ("-v1".equals(args[i].toLowerCase())) {
|
||||
if (StringUtils.equalsIgnoreCase("-v1", args[i])) {
|
||||
version = 1;
|
||||
continue;
|
||||
}
|
||||
|
|
|
@ -433,7 +433,7 @@ public class JobBuilder {
|
|||
return Values.SUCCESS;
|
||||
}
|
||||
|
||||
return Values.valueOf(name.toUpperCase());
|
||||
return Values.valueOf(StringUtils.toUpperCase(name));
|
||||
}
|
||||
|
||||
private void processTaskUpdatedEvent(TaskUpdatedEvent event) {
|
||||
|
|
|
@ -28,6 +28,7 @@ import org.apache.hadoop.mapreduce.jobhistory.JhCounter;
|
|||
import org.apache.hadoop.mapreduce.jobhistory.JhCounterGroup;
|
||||
import org.apache.hadoop.mapreduce.jobhistory.JhCounters;
|
||||
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
import org.codehaus.jackson.annotate.JsonAnySetter;
|
||||
|
||||
/**
|
||||
|
@ -243,7 +244,7 @@ public class LoggedTask implements DeepCompare {
|
|||
}
|
||||
|
||||
private static String canonicalizeCounterName(String nonCanonicalName) {
|
||||
String result = nonCanonicalName.toLowerCase();
|
||||
String result = StringUtils.toLowerCase(nonCanonicalName);
|
||||
|
||||
result = result.replace(' ', '|');
|
||||
result = result.replace('-', '|');
|
||||
|
|
|
@ -23,6 +23,7 @@ import java.util.List;
|
|||
import java.util.Set;
|
||||
import java.util.TreeSet;
|
||||
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
import org.codehaus.jackson.annotate.JsonAnySetter;
|
||||
|
||||
// HACK ALERT!!! This "should" have have two subclasses, which might be called
|
||||
|
@ -611,7 +612,7 @@ public class LoggedTaskAttempt implements DeepCompare {
|
|||
}
|
||||
|
||||
private static String canonicalizeCounterName(String nonCanonicalName) {
|
||||
String result = nonCanonicalName.toLowerCase();
|
||||
String result = StringUtils.toLowerCase(nonCanonicalName);
|
||||
|
||||
result = result.replace(' ', '|');
|
||||
result = result.replace('-', '|');
|
||||
|
|
|
@ -25,6 +25,7 @@ import java.util.*;
|
|||
|
||||
import org.apache.hadoop.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.io.IOUtils;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
|
||||
/**
|
||||
* This is a class used to get the current environment
|
||||
|
@ -43,7 +44,7 @@ public class Environment extends Properties {
|
|||
// http://lopica.sourceforge.net/os.html
|
||||
String command = null;
|
||||
String OS = System.getProperty("os.name");
|
||||
String lowerOs = OS.toLowerCase();
|
||||
String lowerOs = StringUtils.toLowerCase(OS);
|
||||
if (OS.indexOf("Windows") > -1) {
|
||||
command = "cmd /C set";
|
||||
} else if (lowerOs.indexOf("ix") > -1 || lowerOs.indexOf("linux") > -1
|
||||
|
|
|
@ -36,6 +36,7 @@ import org.apache.commons.cli.Option;
|
|||
import org.apache.commons.cli.Options;
|
||||
import org.apache.hadoop.classification.InterfaceAudience.Private;
|
||||
import org.apache.hadoop.classification.InterfaceStability.Unstable;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
import org.apache.hadoop.util.ToolRunner;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationId;
|
||||
|
@ -173,7 +174,7 @@ public class ApplicationCLI extends YarnCLI {
|
|||
if (types != null) {
|
||||
for (String type : types) {
|
||||
if (!type.trim().isEmpty()) {
|
||||
appTypes.add(type.toUpperCase().trim());
|
||||
appTypes.add(StringUtils.toUpperCase(type).trim());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -191,8 +192,8 @@ public class ApplicationCLI extends YarnCLI {
|
|||
break;
|
||||
}
|
||||
try {
|
||||
appStates.add(YarnApplicationState.valueOf(state
|
||||
.toUpperCase().trim()));
|
||||
appStates.add(YarnApplicationState.valueOf(
|
||||
StringUtils.toUpperCase(state).trim()));
|
||||
} catch (IllegalArgumentException ex) {
|
||||
sysout.println("The application state " + state
|
||||
+ " is invalid.");
|
||||
|
|
|
@ -111,7 +111,8 @@ public class NodeCLI extends YarnCLI {
|
|||
if (types != null) {
|
||||
for (String type : types) {
|
||||
if (!type.trim().isEmpty()) {
|
||||
nodeStates.add(NodeState.valueOf(type.trim().toUpperCase()));
|
||||
nodeStates.add(NodeState.valueOf(
|
||||
org.apache.hadoop.util.StringUtils.toUpperCase(type.trim())));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -26,6 +26,7 @@ import java.util.Set;
|
|||
import org.apache.commons.lang.math.LongRange;
|
||||
import org.apache.hadoop.classification.InterfaceAudience.Private;
|
||||
import org.apache.hadoop.classification.InterfaceStability.Unstable;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
|
||||
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
|
||||
|
@ -213,7 +214,7 @@ public class GetApplicationsRequestPBImpl extends GetApplicationsRequest {
|
|||
// Convert applicationTags to lower case and add
|
||||
this.applicationTags = new HashSet<String>();
|
||||
for (String tag : tags) {
|
||||
this.applicationTags.add(tag.toLowerCase());
|
||||
this.applicationTags.add(StringUtils.toLowerCase(tag));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -258,7 +259,8 @@ public class GetApplicationsRequestPBImpl extends GetApplicationsRequest {
|
|||
public void setApplicationStates(Set<String> applicationStates) {
|
||||
EnumSet<YarnApplicationState> appStates = null;
|
||||
for (YarnApplicationState state : YarnApplicationState.values()) {
|
||||
if (applicationStates.contains(state.name().toLowerCase())) {
|
||||
if (applicationStates.contains(
|
||||
StringUtils.toLowerCase(state.name()))) {
|
||||
if (appStates == null) {
|
||||
appStates = EnumSet.of(state);
|
||||
} else {
|
||||
|
|
|
@ -23,6 +23,7 @@ import java.util.Set;
|
|||
|
||||
import org.apache.hadoop.classification.InterfaceAudience.Private;
|
||||
import org.apache.hadoop.classification.InterfaceStability.Unstable;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationId;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
|
||||
|
@ -291,7 +292,7 @@ extends ApplicationSubmissionContext {
|
|||
// Convert applicationTags to lower case and add
|
||||
this.applicationTags = new HashSet<String>();
|
||||
for (String tag : tags) {
|
||||
this.applicationTags.add(tag.toLowerCase());
|
||||
this.applicationTags.add(StringUtils.toLowerCase(tag));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -23,7 +23,6 @@ import java.io.FileNotFoundException;
|
|||
import java.io.IOException;
|
||||
import java.net.URISyntaxException;
|
||||
import java.security.PrivilegedExceptionAction;
|
||||
import java.util.Locale;
|
||||
import java.util.concurrent.Callable;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.concurrent.Future;
|
||||
|
@ -47,6 +46,7 @@ import org.apache.hadoop.fs.permission.FsPermission;
|
|||
import org.apache.hadoop.security.UserGroupInformation;
|
||||
import org.apache.hadoop.util.RunJar;
|
||||
import org.apache.hadoop.util.Shell;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
import org.apache.hadoop.yarn.api.records.LocalResource;
|
||||
import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
|
||||
|
||||
|
@ -272,7 +272,7 @@ public class FSDownload implements Callable<Path> {
|
|||
private long unpack(File localrsrc, File dst) throws IOException {
|
||||
switch (resource.getType()) {
|
||||
case ARCHIVE: {
|
||||
String lowerDst = dst.getName().toLowerCase(Locale.ENGLISH);
|
||||
String lowerDst = StringUtils.toLowerCase(dst.getName());
|
||||
if (lowerDst.endsWith(".jar")) {
|
||||
RunJar.unJar(localrsrc, dst);
|
||||
} else if (lowerDst.endsWith(".zip")) {
|
||||
|
@ -291,7 +291,7 @@ public class FSDownload implements Callable<Path> {
|
|||
}
|
||||
break;
|
||||
case PATTERN: {
|
||||
String lowerDst = dst.getName().toLowerCase(Locale.ENGLISH);
|
||||
String lowerDst = StringUtils.toLowerCase(dst.getName());
|
||||
if (lowerDst.endsWith(".jar")) {
|
||||
String p = resource.getPattern();
|
||||
RunJar.unJar(localrsrc, dst,
|
||||
|
|
|
@ -26,7 +26,6 @@ import java.lang.annotation.Annotation;
|
|||
import java.lang.reflect.Method;
|
||||
import java.lang.reflect.ParameterizedType;
|
||||
import java.lang.reflect.Type;
|
||||
import java.util.Locale;
|
||||
import java.util.Set;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
|
@ -35,6 +34,7 @@ import org.apache.commons.cli.GnuParser;
|
|||
import org.apache.commons.cli.HelpFormatter;
|
||||
import org.apache.commons.cli.Options;
|
||||
import org.apache.hadoop.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
import org.apache.hadoop.yarn.webapp.WebAppException;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
|
@ -241,7 +241,7 @@ public class HamletGen {
|
|||
puts(indent, "\n",
|
||||
"private <T extends _> ", retName, "<T> ", methodName,
|
||||
"_(T e, boolean inline) {\n",
|
||||
" return new ", retName, "<T>(\"", retName.toLowerCase(Locale.US),
|
||||
" return new ", retName, "<T>(\"", StringUtils.toLowerCase(retName),
|
||||
"\", e, opt(", !endTagOptional.contains(retName), ", inline, ",
|
||||
retName.equals("PRE"), ")); }");
|
||||
}
|
||||
|
@ -258,7 +258,7 @@ public class HamletGen {
|
|||
puts(0, ") {");
|
||||
puts(indent,
|
||||
topMode ? "" : " closeAttrs();\n",
|
||||
" return ", retName.toLowerCase(Locale.US), "_(this, ",
|
||||
" return ", StringUtils.toLowerCase(retName), "_" + "(this, ",
|
||||
isInline(className, retName), ");\n", "}");
|
||||
} else if (params.length == 1) {
|
||||
puts(0, "String selector) {");
|
||||
|
|
|
@ -88,7 +88,8 @@ public class RegistryUtils {
|
|||
* @return the converted username
|
||||
*/
|
||||
public static String convertUsername(String username) {
|
||||
String converted= username.toLowerCase(Locale.ENGLISH);
|
||||
String converted =
|
||||
org.apache.hadoop.util.StringUtils.toLowerCase(username);
|
||||
int atSymbol = converted.indexOf('@');
|
||||
if (atSymbol > 0) {
|
||||
converted = converted.substring(0, atSymbol);
|
||||
|
|
|
@ -31,6 +31,7 @@ import javax.ws.rs.QueryParam;
|
|||
import javax.ws.rs.core.Context;
|
||||
import javax.ws.rs.core.MediaType;
|
||||
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
|
||||
import org.apache.hadoop.yarn.server.api.ApplicationContext;
|
||||
import org.apache.hadoop.yarn.server.webapp.WebServices;
|
||||
|
@ -147,7 +148,8 @@ public class AHSWebServices extends WebServices {
|
|||
}
|
||||
Set<String> appStates = parseQueries(statesQuery, true);
|
||||
for (String appState : appStates) {
|
||||
switch (YarnApplicationState.valueOf(appState.toUpperCase())) {
|
||||
switch (YarnApplicationState.valueOf(
|
||||
StringUtils.toUpperCase(appState))) {
|
||||
case FINISHED:
|
||||
case FAILED:
|
||||
case KILLED:
|
||||
|
|
|
@ -52,6 +52,7 @@ import org.apache.commons.logging.LogFactory;
|
|||
import org.apache.hadoop.classification.InterfaceAudience.Public;
|
||||
import org.apache.hadoop.classification.InterfaceStability.Unstable;
|
||||
import org.apache.hadoop.security.UserGroupInformation;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
import org.apache.hadoop.yarn.api.records.timeline.TimelineDomain;
|
||||
import org.apache.hadoop.yarn.api.records.timeline.TimelineDomains;
|
||||
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
|
||||
|
@ -417,7 +418,7 @@ public class TimelineWebServices {
|
|||
String[] strs = str.split(delimiter);
|
||||
List<Field> fieldList = new ArrayList<Field>();
|
||||
for (String s : strs) {
|
||||
s = s.trim().toUpperCase();
|
||||
s = StringUtils.toUpperCase(s.trim());
|
||||
if (s.equals("EVENTS")) {
|
||||
fieldList.add(Field.EVENTS);
|
||||
} else if (s.equals("LASTEVENTONLY")) {
|
||||
|
|
|
@ -31,6 +31,7 @@ import javax.ws.rs.WebApplicationException;
|
|||
|
||||
import org.apache.hadoop.security.UserGroupInformation;
|
||||
import org.apache.hadoop.security.authorize.AuthorizationException;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationId;
|
||||
|
@ -162,9 +163,9 @@ public class WebServices {
|
|||
break;
|
||||
}
|
||||
|
||||
if (checkAppStates
|
||||
&& !appStates.contains(appReport.getYarnApplicationState().toString()
|
||||
.toLowerCase())) {
|
||||
if (checkAppStates &&
|
||||
!appStates.contains(StringUtils.toLowerCase(
|
||||
appReport.getYarnApplicationState().toString()))) {
|
||||
continue;
|
||||
}
|
||||
if (finalStatusQuery != null && !finalStatusQuery.isEmpty()) {
|
||||
|
@ -184,9 +185,9 @@ public class WebServices {
|
|||
continue;
|
||||
}
|
||||
}
|
||||
if (checkAppTypes
|
||||
&& !appTypes.contains(appReport.getApplicationType().trim()
|
||||
.toLowerCase())) {
|
||||
if (checkAppTypes &&
|
||||
!appTypes.contains(
|
||||
StringUtils.toLowerCase(appReport.getApplicationType().trim()))) {
|
||||
continue;
|
||||
}
|
||||
|
||||
|
@ -368,7 +369,8 @@ public class WebServices {
|
|||
if (isState) {
|
||||
try {
|
||||
// enum string is in the uppercase
|
||||
YarnApplicationState.valueOf(paramStr.trim().toUpperCase());
|
||||
YarnApplicationState.valueOf(
|
||||
StringUtils.toUpperCase(paramStr.trim()));
|
||||
} catch (RuntimeException e) {
|
||||
YarnApplicationState[] stateArray =
|
||||
YarnApplicationState.values();
|
||||
|
@ -378,7 +380,7 @@ public class WebServices {
|
|||
+ allAppStates);
|
||||
}
|
||||
}
|
||||
params.add(paramStr.trim().toLowerCase());
|
||||
params.add(StringUtils.toLowerCase(paramStr.trim()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -46,6 +46,7 @@ import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
|
|||
import org.apache.hadoop.security.authorize.PolicyProvider;
|
||||
import org.apache.hadoop.security.token.Token;
|
||||
import org.apache.hadoop.service.AbstractService;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenRequest;
|
||||
|
@ -756,7 +757,7 @@ public class ClientRMService extends AbstractService implements
|
|||
if (applicationTypes != null && !applicationTypes.isEmpty()) {
|
||||
String appTypeToMatch = caseSensitive
|
||||
? application.getApplicationType()
|
||||
: application.getApplicationType().toLowerCase();
|
||||
: StringUtils.toLowerCase(application.getApplicationType());
|
||||
if (!applicationTypes.contains(appTypeToMatch)) {
|
||||
continue;
|
||||
}
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue