YARN-8363. Upgrade commons-lang version to 3.7 in hadoop-yarn-project. Contributed by Takanobu Asanuma.

This commit is contained in:
Author: Akira Ajisaka
Date: 2018-06-13 00:34:57 +09:00
Parent: 5c7ad52573
Commit: 652bcbb3e4
Signature: no known key found for this signature in database (GPG Key ID: C1EDBB9CA400FD50)
117 changed files with 281 additions and 318 deletions

View File

@ -34,10 +34,6 @@
</properties>
<dependencies>
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>

View File

@ -21,7 +21,7 @@
import java.util.EnumSet;
import java.util.Set;
import org.apache.commons.lang.math.LongRange;
import org.apache.commons.lang3.Range;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Stable;
@ -80,8 +80,8 @@ public static GetApplicationsRequest newInstance(
Set<String> applicationTypes,
Set<String> applicationTags,
EnumSet<YarnApplicationState> applicationStates,
LongRange startRange,
LongRange finishRange,
Range<Long> startRange,
Range<Long> finishRange,
Long limit) {
GetApplicationsRequest request =
Records.newRecord(GetApplicationsRequest.class);
@ -95,11 +95,11 @@ public static GetApplicationsRequest newInstance(
request.setApplicationStates(applicationStates);
if (startRange != null) {
request.setStartRange(
startRange.getMinimumLong(), startRange.getMaximumLong());
startRange.getMinimum(), startRange.getMaximum());
}
if (finishRange != null) {
request.setFinishRange(
finishRange.getMinimumLong(), finishRange.getMaximumLong());
finishRange.getMinimum(), finishRange.getMaximum());
}
if (limit != null) {
request.setLimit(limit);
@ -302,11 +302,11 @@ public static GetApplicationsRequest newInstance(
/**
* Get the range of start times to filter applications on
*
* @return {@link LongRange} of start times to filter applications on
* @return {@link Range} of start times to filter applications on
*/
@Private
@Unstable
public abstract LongRange getStartRange();
public abstract Range<Long> getStartRange();
/**
* Set the range of start times to filter applications on
@ -315,7 +315,7 @@ public static GetApplicationsRequest newInstance(
*/
@Private
@Unstable
public abstract void setStartRange(LongRange range);
public abstract void setStartRange(Range<Long> range);
/**
* Set the range of start times to filter applications on
@ -332,11 +332,11 @@ public abstract void setStartRange(long begin, long end)
/**
* Get the range of finish times to filter applications on
*
* @return {@link LongRange} of finish times to filter applications on
* @return {@link Range} of finish times to filter applications on
*/
@Private
@Unstable
public abstract LongRange getFinishRange();
public abstract Range<Long> getFinishRange();
/**
* Set the range of finish times to filter applications on
@ -345,7 +345,7 @@ public abstract void setStartRange(long begin, long end)
*/
@Private
@Unstable
public abstract void setFinishRange(LongRange range);
public abstract void setFinishRange(Range<Long> range);
/**
* Set the range of finish times to filter applications on

View File

@ -23,7 +23,7 @@
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.NotImplementedException;
import org.apache.commons.lang3.NotImplementedException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceAudience.Public;

View File

@ -19,7 +19,7 @@
package org.apache.hadoop.yarn.util.resource;
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.protocolrecords.ResourceTypes;
import org.apache.hadoop.yarn.api.records.Resource;
@ -457,7 +457,7 @@ public static String[] parseResourceValue(String resourceValue) {
}
String units = resourceValue.substring(i);
if((StringUtils.isAlpha(units))) {
if (StringUtils.isAlpha(units) || units.equals("")) {
resource[0] = units;
resource[1] = resourceValue.substring(0, i);
return resource;

View File

@ -49,10 +49,6 @@
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</dependency>
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>

View File

@ -39,7 +39,7 @@
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;

View File

@ -27,7 +27,7 @@
import javax.ws.rs.core.MediaType;
import com.google.common.base.Preconditions;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

View File

@ -197,11 +197,6 @@
<artifactId>commons-io</artifactId>
</dependency>
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
</dependency>
<dependency>
<groupId>org.apache.curator</groupId>
<artifactId>curator-client</artifactId>

View File

@ -21,7 +21,7 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.yarn.service.utils.ServiceUtils;

View File

@ -19,7 +19,7 @@
package org.apache.hadoop.yarn.service.client;
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.CuratorFrameworkFactory;
import org.apache.curator.retry.RetryNTimes;

View File

@ -17,7 +17,7 @@
package org.apache.hadoop.yarn.service.monitor.probe;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.yarn.service.component.instance.ComponentInstance;
import java.io.IOException;

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.yarn.service.provider;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

View File

@ -17,7 +17,7 @@
*/
package org.apache.hadoop.yarn.service.provider;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.ApplicationConstants;
import org.apache.hadoop.yarn.api.records.Container;

View File

@ -17,7 +17,7 @@
*/
package org.apache.hadoop.yarn.service.provider.docker;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.yarn.service.api.records.Artifact;
import org.apache.hadoop.yarn.service.api.records.ConfigFile;

View File

@ -17,7 +17,7 @@
*/
package org.apache.hadoop.yarn.service.provider.docker;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.service.component.instance.ComponentInstance;
import org.apache.hadoop.yarn.service.provider.AbstractProviderService;

View File

@ -17,7 +17,7 @@
*/
package org.apache.hadoop.yarn.service.provider.tarball;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.service.api.records.Artifact;

View File

@ -23,7 +23,7 @@
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;
import com.google.common.collect.Sets;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;

View File

@ -21,8 +21,8 @@
import com.google.common.base.Preconditions;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;

View File

@ -46,10 +46,6 @@
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</dependency>
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
</dependency>
<dependency>
<groupId>commons-cli</groupId>
<artifactId>commons-cli</artifactId>

View File

@ -32,7 +32,7 @@
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.MissingArgumentException;
import org.apache.commons.cli.Options;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ToolRunner;

View File

@ -59,7 +59,7 @@
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.math3.util.Pair;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceAudience.Public;

View File

@ -35,8 +35,8 @@
import org.apache.commons.cli.MissingArgumentException;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.time.DateFormatUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.util.ToolRunner;

View File

@ -36,7 +36,7 @@
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.conf.Configuration;

View File

@ -52,9 +52,9 @@
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.time.DateFormatUtils;
import org.apache.commons.lang.time.DurationFormatUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.commons.lang3.time.DurationFormatUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.HttpConfig.Policy;
import org.apache.hadoop.security.UserGroupInformation;

View File

@ -54,7 +54,7 @@
import java.util.Map;
import java.util.Set;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;

View File

@ -47,7 +47,7 @@
import java.util.regex.Pattern;
import org.apache.commons.cli.Options;
import org.apache.commons.lang.time.DateFormatUtils;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.protocolrecords.UpdateApplicationTimeoutsRequest;
import org.apache.hadoop.yarn.api.protocolrecords.UpdateApplicationTimeoutsResponse;

View File

@ -65,10 +65,6 @@
<groupId>org.apache.commons</groupId>
<artifactId>commons-compress</artifactId>
</dependency>
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>

View File

@ -23,7 +23,7 @@
import java.util.List;
import java.util.Set;
import org.apache.commons.lang.math.LongRange;
import org.apache.commons.lang3.Range;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.util.StringUtils;
@ -51,8 +51,8 @@ public class GetApplicationsRequestPBImpl extends GetApplicationsRequest {
Set<String> users = null;
Set<String> queues = null;
long limit = Long.MAX_VALUE;
LongRange start = null;
LongRange finish = null;
Range<Long> start = null;
Range<Long> finish = null;
private Set<String> applicationTags;
private ApplicationsRequestScope scope;
@ -103,12 +103,12 @@ public YarnApplicationStateProto apply(YarnApplicationState input) {
builder.setScope(ProtoUtils.convertToProtoFormat(scope));
}
if (start != null) {
builder.setStartBegin(start.getMinimumLong());
builder.setStartEnd(start.getMaximumLong());
builder.setStartBegin(start.getMinimum());
builder.setStartEnd(start.getMaximum());
}
if (finish != null) {
builder.setFinishBegin(finish.getMinimumLong());
builder.setFinishEnd(finish.getMaximumLong());
builder.setFinishBegin(finish.getMinimum());
builder.setFinishEnd(finish.getMaximum());
}
if (limit != Long.MAX_VALUE) {
builder.setLimit(limit);
@ -316,20 +316,20 @@ public void setLimit(long limit) {
}
@Override
public LongRange getStartRange() {
public Range<Long> getStartRange() {
if (this.start == null) {
GetApplicationsRequestProtoOrBuilder p = viaProto ? proto: builder;
if (p.hasStartBegin() || p.hasStartEnd()) {
long begin = p.hasStartBegin() ? p.getStartBegin() : 0L;
long end = p.hasStartEnd() ? p.getStartEnd() : Long.MAX_VALUE;
this.start = new LongRange(begin, end);
this.start = Range.between(begin, end);
}
}
return this.start;
}
@Override
public void setStartRange(LongRange range) {
public void setStartRange(Range<Long> range) {
this.start = range;
}
@ -340,24 +340,24 @@ public void setStartRange(long begin, long end)
throw new IllegalArgumentException("begin > end in range (begin, " +
"end): (" + begin + ", " + end + ")");
}
this.start = new LongRange(begin, end);
this.start = Range.between(begin, end);
}
@Override
public LongRange getFinishRange() {
public Range<Long> getFinishRange() {
if (this.finish == null) {
GetApplicationsRequestProtoOrBuilder p = viaProto ? proto: builder;
if (p.hasFinishBegin() || p.hasFinishEnd()) {
long begin = p.hasFinishBegin() ? p.getFinishBegin() : 0L;
long end = p.hasFinishEnd() ? p.getFinishEnd() : Long.MAX_VALUE;
this.finish = new LongRange(begin, end);
this.finish = Range.between(begin, end);
}
}
return this.finish;
}
@Override
public void setFinishRange(LongRange range) {
public void setFinishRange(Range<Long> range) {
this.finish = range;
}
@ -367,7 +367,7 @@ public void setFinishRange(long begin, long end) {
throw new IllegalArgumentException("begin > end in range (begin, " +
"end): (" + begin + ", " + end + ")");
}
this.finish = new LongRange(begin, end);
this.finish = Range.between(begin, end);
}
@Override

View File

@ -27,7 +27,7 @@
import java.util.List;
import java.util.Set;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;

View File

@ -28,7 +28,7 @@
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Paths;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.fs.Path;
/**

View File

@ -34,7 +34,7 @@
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.conf.Configuration;

View File

@ -28,7 +28,7 @@
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;

View File

@ -43,7 +43,7 @@
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.apache.commons.lang.SerializationUtils;
import org.apache.commons.lang3.SerializationUtils;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.conf.Configuration;

View File

@ -37,7 +37,7 @@
import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
import java.util.regex.Pattern;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;

View File

@ -21,7 +21,7 @@
import java.util.HashSet;
import java.util.Set;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.NodeLabel;
import org.apache.hadoop.yarn.api.records.Resource;

View File

@ -26,7 +26,7 @@
import java.util.List;
import java.util.Set;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.hadoop.classification.InterfaceAudience.Private;
@Private

View File

@ -153,7 +153,7 @@ protected void append(LoggingEvent event) {
if (throwableStr != null) {
message = message + "\n" + StringUtils.join("\n", throwableStr);
message =
org.apache.commons.lang.StringUtils.left(message, MAX_MESSAGE_SIZE);
org.apache.commons.lang3.StringUtils.left(message, MAX_MESSAGE_SIZE);
}
int level = event.getLevel().toInt();

View File

@ -42,7 +42,7 @@
import org.apache.commons.io.filefilter.AndFileFilter;
import org.apache.commons.io.filefilter.DirectoryFileFilter;
import org.apache.commons.io.filefilter.RegexFileFilter;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View File

@ -31,7 +31,7 @@
import java.util.TreeMap;
import java.util.regex.Pattern;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@ -32,7 +32,7 @@
import javax.servlet.http.HttpServlet;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configuration.IntegerRanges;

View File

@ -28,7 +28,7 @@
import static java.util.EnumSet.*;
import java.util.Iterator;
import static org.apache.commons.lang.StringEscapeUtils.*;
import static org.apache.commons.lang3.StringEscapeUtils.*;
import static org.apache.hadoop.yarn.webapp.hamlet.HamletImpl.EOpt.*;
import org.apache.hadoop.classification.InterfaceAudience;
@ -106,7 +106,7 @@ protected void _p(boolean quote, Object... args) {
if (!opts.contains(PRE)) {
indent(opts);
}
out.print(quote ? escapeHtml(String.valueOf(s))
out.print(quote ? escapeHtml4(String.valueOf(s))
: String.valueOf(s));
if (!opts.contains(INLINE) && !opts.contains(PRE)) {
out.println();
@ -309,7 +309,7 @@ protected void printAttr(String name, String value) {
sb.setLength(0);
sb.append(' ').append(name);
if (value != null) {
sb.append("=\"").append(escapeHtml(value)).append("\"");
sb.append("=\"").append(escapeHtml4(value)).append("\"");
}
out.print(sb.toString());
}

View File

@ -28,7 +28,7 @@
import static java.util.EnumSet.*;
import java.util.Iterator;
import static org.apache.commons.lang.StringEscapeUtils.*;
import static org.apache.commons.lang3.StringEscapeUtils.*;
import static org.apache.hadoop.yarn.webapp.hamlet2.HamletImpl.EOpt.*;
import org.apache.hadoop.classification.InterfaceAudience;
@ -104,7 +104,7 @@ protected void _p(boolean quote, Object... args) {
if (!opts.contains(PRE)) {
indent(opts);
}
out.print(quote ? escapeHtml(String.valueOf(s))
out.print(quote ? escapeHtml4(String.valueOf(s))
: String.valueOf(s));
if (!opts.contains(INLINE) && !opts.contains(PRE)) {
out.println();
@ -307,7 +307,7 @@ protected void printAttr(String name, String value) {
sb.setLength(0);
sb.append(' ').append(name);
if (value != null) {
sb.append("=\"").append(escapeHtml(value)).append("\"");
sb.append("=\"").append(escapeHtml4(value)).append("\"");
}
out.print(sb.toString());
}

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.yarn.webapp.view;
import static org.apache.commons.lang.StringEscapeUtils.escapeJavaScript;
import static org.apache.commons.lang3.StringEscapeUtils.escapeEcmaScript;
import static org.apache.hadoop.yarn.util.StringHelper.djoin;
import static org.apache.hadoop.yarn.util.StringHelper.join;
import static org.apache.hadoop.yarn.util.StringHelper.split;
@ -146,7 +146,7 @@ protected void initDataTables(List<String> list) {
}
int pos = init.indexOf('{') + 1;
init = new StringBuffer(init).insert(pos, stateSaveInit).toString();
list.add(join(" $('", escapeJavaScript(selector), "').dataTable(", init,
list.add(join(" $('", escapeEcmaScript(selector), "').dataTable(", init,
").fnSetFilteringDelay(288);"));
}
@ -174,7 +174,7 @@ protected void initDialogs(List<String> list) {
if (init.isEmpty()) {
init = defaultInit;
}
list.add(join(" $('", escapeJavaScript(selector),
list.add(join(" $('", escapeEcmaScript(selector),
"').click(function() { $(this).children('.dialog').dialog(",
init, "); return false; });"));
}

View File

@ -20,7 +20,7 @@
import java.io.PrintWriter;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.yarn.webapp.View;
@ -46,8 +46,8 @@ protected TextView(ViewContext ctx, String contentType) {
public void echo(Object... args) {
PrintWriter out = writer();
for (Object s : args) {
String escapedString = StringEscapeUtils.escapeJavaScript(
StringEscapeUtils.escapeHtml(s.toString()));
String escapedString = StringEscapeUtils.escapeEcmaScript(
StringEscapeUtils.escapeHtml4(s.toString()));
out.print(escapedString);
}
}

View File

@ -20,6 +20,7 @@
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.apache.commons.lang3.Range;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.yarn.api.resource.PlacementConstraint;
@ -101,7 +102,7 @@ private static Object genTypeValue(Type type) {
ParameterizedType pt = (ParameterizedType)type;
Type rawType = pt.getRawType();
Type [] params = pt.getActualTypeArguments();
// only support EnumSet<T>, List<T>, Set<T>, Map<K,V>
// only support EnumSet<T>, List<T>, Set<T>, Map<K,V>, Range<T>
if (rawType.equals(EnumSet.class)) {
if (params[0] instanceof Class) {
Class c = (Class)(params[0]);
@ -115,6 +116,11 @@ private static Object genTypeValue(Type type) {
Map<Object, Object> map = Maps.newHashMap();
map.put(genTypeValue(params[0]), genTypeValue(params[1]));
ret = map;
} else if (rawType.equals(Range.class)) {
ret = typeValueCache.get(rawType);
if (ret != null) {
return ret;
}
}
}
if (ret == null) {

View File

@ -21,7 +21,7 @@
import java.util.HashSet;
import java.util.Set;
import org.apache.commons.lang.math.LongRange;
import org.apache.commons.lang3.Range;
import org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationsRequestPBImpl;
@ -90,11 +90,12 @@ public void testGetApplicationsRequest(){
Assert.assertEquals(
"StartRange from proto is not the same with original request",
requestFromProto.getStartRange(), new LongRange(startBegin, startEnd));
requestFromProto.getStartRange(), Range.between(startBegin, startEnd));
Assert.assertEquals(
"FinishRange from proto is not the same with original request",
requestFromProto.getFinishRange(), new LongRange(finishBegin, finishEnd));
requestFromProto.getFinishRange(),
Range.between(finishBegin, finishEnd));
Assert.assertEquals(
"Limit from proto is not the same with original request",

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.yarn.api;
import java.io.IOException;
import org.apache.commons.lang.math.LongRange;
import org.apache.commons.lang3.Range;
import org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenRequestProto;
import org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenResponseProto;
import org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenRequestProto;
@ -360,7 +360,7 @@ public class TestPBImplRecords extends BasePBImplRecordsTest {
@BeforeClass
public static void setup() throws Exception {
typeValueCache.put(LongRange.class, new LongRange(1000, 2000));
typeValueCache.put(Range.class, Range.between(1000L, 2000L));
typeValueCache.put(URL.class, URL.newInstance(
"http", "localhost", 8080, "file0"));
typeValueCache.put(SerializedException.class,

View File

@ -27,7 +27,7 @@
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.NodeLabel;

View File

@ -34,7 +34,7 @@
import java.net.URL;
import java.net.URLEncoder;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.net.ServerSocketUtil;
import org.apache.hadoop.yarn.MockApps;

View File

@ -119,11 +119,6 @@
<artifactId>commons-io</artifactId>
</dependency>
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
</dependency>
<dependency>
<groupId>commons-net</groupId>
<artifactId>commons-net</artifactId>

View File

@ -19,7 +19,7 @@
package org.apache.hadoop.registry.client.api;
import com.google.common.base.Preconditions;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.service.ServiceStateException;
import org.apache.hadoop.registry.client.impl.RegistryOperationsClient;

View File

@ -19,7 +19,7 @@
package org.apache.hadoop.registry.client.binding;
import com.fasterxml.jackson.core.JsonProcessingException;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.registry.client.exceptions.InvalidRecordException;

View File

@ -20,7 +20,7 @@
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.PathNotFoundException;

View File

@ -24,7 +24,7 @@
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang.NotImplementedException;
import org.apache.commons.lang3.NotImplementedException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
@ -238,12 +238,12 @@ public void delete(String path, boolean recursive)
@Override
public boolean addWriteAccessor(String id, String pass) throws IOException {
throw new NotImplementedException();
throw new NotImplementedException("Code is not implemented");
}
@Override
public void clearWriteAccessors() {
throw new NotImplementedException();
throw new NotImplementedException("Code is not implemented");
}
}

View File

@ -21,7 +21,7 @@
import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
import com.google.common.collect.Lists;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.curator.framework.CuratorFrameworkFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;

View File

@ -21,7 +21,7 @@
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.net.util.SubnetUtils;
import org.apache.hadoop.conf.Configuration;
import static org.apache.hadoop.registry.client.api.RegistryConstants.KEY_DNS_SPLIT_REVERSE_ZONE_RANGE;

View File

@ -19,7 +19,7 @@
package org.apache.hadoop.registry.server.integration;
import com.google.common.base.Preconditions;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.registry.client.types.RegistryPathStatus;
import org.apache.hadoop.registry.client.types.ServiceRecord;
import org.apache.hadoop.registry.client.types.yarn.YarnRegistryAttributes;

View File

@ -19,7 +19,7 @@
package org.apache.hadoop.registry.server.services;
import com.google.common.base.Preconditions;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.curator.ensemble.fixed.FixedEnsembleProvider;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;

View File

@ -20,7 +20,7 @@
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.curator.framework.api.BackgroundCallback;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.PathIsNotEmptyDirectoryException;

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.registry;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.registry.client.api.RegistryConstants;
import org.apache.hadoop.registry.client.binding.RegistryUtils;

View File

@ -186,10 +186,10 @@ public GetApplicationReportResponse getApplicationReport(
IOException {
long startedBegin =
request.getStartRange() == null ? 0L : request.getStartRange()
.getMinimumLong();
.getMinimum();
long startedEnd =
request.getStartRange() == null ? Long.MAX_VALUE : request
.getStartRange().getMaximumLong();
.getStartRange().getMaximum();
GetApplicationsResponse response =
GetApplicationsResponse.newInstance(new ArrayList<ApplicationReport>(
history.getApplications(request.getLimit(), startedBegin, startedEnd)

View File

@ -32,7 +32,7 @@
import java.util.Map.Entry;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.lang.time.FastDateFormat;
import org.apache.commons.lang3.time.FastDateFormat;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;

View File

@ -37,7 +37,7 @@
import java.util.TreeMap;
import org.apache.commons.collections.map.LRUMap;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability;

View File

@ -35,7 +35,7 @@
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.core.MediaType;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

View File

@ -29,7 +29,7 @@
import java.util.List;
import java.util.TimeZone;
import org.apache.commons.lang.NotImplementedException;
import org.apache.commons.lang3.NotImplementedException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
@ -987,12 +987,12 @@ public GetSubClusterPoliciesConfigurationsResponse getPoliciesConfigurations(
@Override
public Version getCurrentVersion() {
throw new NotImplementedException();
throw new NotImplementedException("Code is not implemented");
}
@Override
public Version loadVersion() {
throw new NotImplementedException();
throw new NotImplementedException("Code is not implemented");
}
@Override

View File

@ -36,7 +36,7 @@
import javax.cache.integration.CacheLoaderException;
import javax.cache.spi.CachingProvider;
import org.apache.commons.lang.NotImplementedException;
import org.apache.commons.lang3.NotImplementedException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.retry.RetryPolicies;
import org.apache.hadoop.io.retry.RetryPolicy;
@ -538,7 +538,7 @@ public Map<K, V> loadAll(Iterable<? extends K> keys)
throws CacheLoaderException {
// The FACADE does not use the Cache's getAll API. Hence this is not
// required to be implemented
throw new NotImplementedException();
throw new NotImplementedException("Code is not implemented");
}
}

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.yarn.server.scheduler;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;

View File

@ -25,7 +25,7 @@
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.conf.Configuration;

View File

@ -62,7 +62,7 @@ public static void setTimelineFilters(Configuration conf,
}
target.addAll(defaultInitializers);
String actualInitializers =
org.apache.commons.lang.StringUtils.join(target, ",");
org.apache.commons.lang3.StringUtils.join(target, ",");
LOG.info("Filter initializers set for timeline service: " +
actualInitializers);
conf.set("hadoop.http.filter.initializers", actualInitializers);

View File

@ -25,7 +25,7 @@
import java.util.Collection;
import java.util.List;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.ApplicationBaseProtocol;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportRequest;
@ -173,8 +173,8 @@ public Collection<ContainerReport> run() throws Exception {
+ container.getNodeHttpAddress())
.append("'>")
.append(container.getNodeHttpAddress() == null ? "N/A" :
StringEscapeUtils.escapeJavaScript(StringEscapeUtils
.escapeHtml(container.getNodeHttpAddress())))
StringEscapeUtils.escapeEcmaScript(StringEscapeUtils
.escapeHtml4(container.getNodeHttpAddress())))
.append("</a>\",\"")
.append(container.getContainerExitStatus()).append("\",\"<a href='")
.append(container.getLogUrl() == null ?

View File

@ -28,7 +28,7 @@
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.security.UserGroupInformation;
@ -354,7 +354,7 @@ public ContainerReport run() throws Exception {
.append(nodeLink == null ? "#" : "href='" + nodeLink)
.append("'>")
.append(nodeLink == null ? "N/A" : StringEscapeUtils
.escapeJavaScript(StringEscapeUtils.escapeHtml(nodeLink)))
.escapeEcmaScript(StringEscapeUtils.escapeHtml4(nodeLink)))
.append("</a>\",\"<a ")
.append(logsLink == null ? "#" : "href='" + logsLink).append("'>")
.append(logsLink == null ? "N/A" : "Logs").append("</a>\"],\n");

View File

@ -32,8 +32,8 @@
import java.util.EnumSet;
import java.util.List;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang.math.LongRange;
import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.commons.lang3.Range;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.ApplicationBaseProtocol;
@ -108,7 +108,7 @@ protected void fetchData() throws YarnException, IOException,
"app.started-time.end must be greater than app.started-time.begin");
}
request.setStartRange(
new LongRange(appStartedTimeBegain, appStartedTimeEnd));
Range.between(appStartedTimeBegain, appStartedTimeEnd));
if (callerUGI == null) {
appReports = getApplicationReport(request);
@ -174,19 +174,19 @@ protected void renderData(Block html) {
.append(app.getAppId())
.append("</a>\",\"")
.append(
StringEscapeUtils.escapeJavaScript(StringEscapeUtils.escapeHtml(app
StringEscapeUtils.escapeEcmaScript(StringEscapeUtils.escapeHtml4(app
.getUser())))
.append("\",\"")
.append(
StringEscapeUtils.escapeJavaScript(StringEscapeUtils.escapeHtml(app
StringEscapeUtils.escapeEcmaScript(StringEscapeUtils.escapeHtml4(app
.getName())))
.append("\",\"")
.append(
StringEscapeUtils.escapeJavaScript(StringEscapeUtils.escapeHtml(app
StringEscapeUtils.escapeEcmaScript(StringEscapeUtils.escapeHtml4(app
.getType())))
.append("\",\"")
.append(
StringEscapeUtils.escapeJavaScript(StringEscapeUtils.escapeHtml(app
StringEscapeUtils.escapeEcmaScript(StringEscapeUtils.escapeHtml4(app
.getQueue()))).append("\",\"").append(String
.valueOf(app.getPriority()))
.append("\",\"").append(app.getStartedTime())

View File

@ -31,7 +31,7 @@
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.WebApplicationException;
import org.apache.commons.lang.math.LongRange;
import org.apache.commons.lang3.Range;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AuthorizationException;
import org.apache.hadoop.util.StringUtils;
@ -151,7 +151,7 @@ public AppsInfo getApps(HttpServletRequest req, HttpServletResponse res,
final GetApplicationsRequest request =
GetApplicationsRequest.newInstance();
request.setLimit(countNum);
request.setStartRange(new LongRange(sBegin, sEnd));
request.setStartRange(Range.between(sBegin, sEnd));
try {
if (callerUGI == null) {
// TODO: the request should take the params like what RMWebServices does

View File

@ -63,10 +63,6 @@
<groupId>org.codehaus.jettison</groupId>
<artifactId>jettison</artifactId>
</dependency>
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>

View File

@ -35,7 +35,7 @@
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.math.RandomUtils;
import org.apache.commons.lang3.RandomUtils;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileUtil;

View File

@ -35,7 +35,7 @@
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;

View File

@ -34,7 +34,7 @@
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.yarn.api.records.ContainerSubState;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.scheduler.UpdateContainerSchedulerEvent;
import org.slf4j.Logger;

View File

@ -21,7 +21,7 @@
package org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.privileged;
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.classification.InterfaceAudience;

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.resources;
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;

View File

@ -20,7 +20,7 @@
package org.apache.hadoop.yarn.server.nodemanager.executor;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor.Signal;

View File

@ -51,7 +51,7 @@
import javax.ws.rs.core.StreamingOutput;
import javax.ws.rs.core.UriInfo;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.http.JettyUtils;

View File

@ -1461,13 +1461,13 @@ public void testWindowsShellScriptBuilderCommand() throws IOException {
// Basic tests: less length, exact length, max+1 length
builder.command(Arrays.asList(
org.apache.commons.lang.StringUtils.repeat("A", 1024)));
org.apache.commons.lang3.StringUtils.repeat("A", 1024)));
builder.command(Arrays.asList(
org.apache.commons.lang.StringUtils.repeat(
org.apache.commons.lang3.StringUtils.repeat(
"E", Shell.WINDOWS_MAX_SHELL_LENGTH - callCmd.length())));
try {
builder.command(Arrays.asList(
org.apache.commons.lang.StringUtils.repeat(
org.apache.commons.lang3.StringUtils.repeat(
"X", Shell.WINDOWS_MAX_SHELL_LENGTH -callCmd.length() + 1)));
fail("longCommand was expected to throw");
} catch(IOException e) {
@ -1476,21 +1476,21 @@ public void testWindowsShellScriptBuilderCommand() throws IOException {
// Composite tests, from parts: less, exact and +
builder.command(Arrays.asList(
org.apache.commons.lang.StringUtils.repeat("A", 1024),
org.apache.commons.lang.StringUtils.repeat("A", 1024),
org.apache.commons.lang.StringUtils.repeat("A", 1024)));
org.apache.commons.lang3.StringUtils.repeat("A", 1024),
org.apache.commons.lang3.StringUtils.repeat("A", 1024),
org.apache.commons.lang3.StringUtils.repeat("A", 1024)));
// buildr.command joins the command parts with an extra space
builder.command(Arrays.asList(
org.apache.commons.lang.StringUtils.repeat("E", 4095),
org.apache.commons.lang.StringUtils.repeat("E", 2047),
org.apache.commons.lang.StringUtils.repeat("E", 2047 - callCmd.length())));
org.apache.commons.lang3.StringUtils.repeat("E", 4095),
org.apache.commons.lang3.StringUtils.repeat("E", 2047),
org.apache.commons.lang3.StringUtils.repeat("E", 2047 - callCmd.length())));
try {
builder.command(Arrays.asList(
org.apache.commons.lang.StringUtils.repeat("X", 4095),
org.apache.commons.lang.StringUtils.repeat("X", 2047),
org.apache.commons.lang.StringUtils.repeat("X", 2048 - callCmd.length())));
org.apache.commons.lang3.StringUtils.repeat("X", 4095),
org.apache.commons.lang3.StringUtils.repeat("X", 2047),
org.apache.commons.lang3.StringUtils.repeat("X", 2048 - callCmd.length())));
fail("long commands was expected to throw");
} catch(IOException e) {
assertThat(e.getMessage(), CoreMatchers.containsString(expectedMessage));
@ -1508,11 +1508,11 @@ public void testWindowsShellScriptBuilderEnv() throws IOException {
ShellScriptBuilder builder = ShellScriptBuilder.create();
// test env
builder.env("somekey", org.apache.commons.lang.StringUtils.repeat("A", 1024));
builder.env("somekey", org.apache.commons.lang.StringUtils.repeat(
builder.env("somekey", org.apache.commons.lang3.StringUtils.repeat("A", 1024));
builder.env("somekey", org.apache.commons.lang3.StringUtils.repeat(
"A", Shell.WINDOWS_MAX_SHELL_LENGTH - ("@set somekey=").length()));
try {
builder.env("somekey", org.apache.commons.lang.StringUtils.repeat(
builder.env("somekey", org.apache.commons.lang3.StringUtils.repeat(
"A", Shell.WINDOWS_MAX_SHELL_LENGTH - ("@set somekey=").length()) + 1);
fail("long env was expected to throw");
} catch(IOException e) {
@ -1533,11 +1533,11 @@ public void testWindowsShellScriptBuilderMkdir() throws IOException {
ShellScriptBuilder builder = ShellScriptBuilder.create();
// test mkdir
builder.mkdir(new Path(org.apache.commons.lang.StringUtils.repeat("A", 1024)));
builder.mkdir(new Path(org.apache.commons.lang.StringUtils.repeat("E",
builder.mkdir(new Path(org.apache.commons.lang3.StringUtils.repeat("A", 1024)));
builder.mkdir(new Path(org.apache.commons.lang3.StringUtils.repeat("E",
(Shell.WINDOWS_MAX_SHELL_LENGTH - mkDirCmd.length()) / 2)));
try {
builder.mkdir(new Path(org.apache.commons.lang.StringUtils.repeat(
builder.mkdir(new Path(org.apache.commons.lang3.StringUtils.repeat(
"X", (Shell.WINDOWS_MAX_SHELL_LENGTH - mkDirCmd.length())/2 +1)));
fail("long mkdir was expected to throw");
} catch(IOException e) {
@ -1557,18 +1557,18 @@ public void testWindowsShellScriptBuilderLink() throws IOException {
ShellScriptBuilder builder = ShellScriptBuilder.create();
// test link
builder.link(new Path(org.apache.commons.lang.StringUtils.repeat("A", 1024)),
new Path(org.apache.commons.lang.StringUtils.repeat("B", 1024)));
builder.link(new Path(org.apache.commons.lang3.StringUtils.repeat("A", 1024)),
new Path(org.apache.commons.lang3.StringUtils.repeat("B", 1024)));
builder.link(
new Path(org.apache.commons.lang.StringUtils.repeat(
new Path(org.apache.commons.lang3.StringUtils.repeat(
"E", (Shell.WINDOWS_MAX_SHELL_LENGTH - linkCmd.length())/2)),
new Path(org.apache.commons.lang.StringUtils.repeat(
new Path(org.apache.commons.lang3.StringUtils.repeat(
"F", (Shell.WINDOWS_MAX_SHELL_LENGTH - linkCmd.length())/2)));
try {
builder.link(
new Path(org.apache.commons.lang.StringUtils.repeat(
new Path(org.apache.commons.lang3.StringUtils.repeat(
"X", (Shell.WINDOWS_MAX_SHELL_LENGTH - linkCmd.length())/2 + 1)),
new Path(org.apache.commons.lang.StringUtils.repeat(
new Path(org.apache.commons.lang3.StringUtils.repeat(
"Y", (Shell.WINDOWS_MAX_SHELL_LENGTH - linkCmd.length())/2) + 1));
fail("long link was expected to throw");
} catch(IOException e) {

View File

@ -59,7 +59,7 @@
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.AbstractFileSystem;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;

View File

@ -46,8 +46,8 @@
import java.util.concurrent.TimeUnit;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.AbstractFileSystem;
import org.apache.hadoop.fs.FileContext;

View File

@ -138,10 +138,6 @@
<groupId>org.codehaus.jettison</groupId>
<artifactId>jettison</artifactId>
</dependency>
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-core</artifactId>

View File

@ -37,7 +37,7 @@
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.commons.cli.UnrecognizedOptionException;
import org.apache.commons.lang.math.LongRange;
import org.apache.commons.lang3.Range;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;
@ -754,7 +754,7 @@ public KillApplicationResponse forceKillApplication(
message.append(" at ").append(remoteAddress.getHostAddress());
}
String diagnostics = org.apache.commons.lang.StringUtils
String diagnostics = org.apache.commons.lang3.StringUtils
.trimToNull(request.getDiagnostics());
if (diagnostics != null) {
message.append(" with diagnostic message: ");
@ -812,8 +812,8 @@ public GetApplicationsResponse getApplications(GetApplicationsRequest request)
Set<String> queues = request.getQueues();
Set<String> tags = request.getApplicationTags();
long limit = request.getLimit();
LongRange start = request.getStartRange();
LongRange finish = request.getFinishRange();
Range<Long> start = request.getStartRange();
Range<Long> finish = request.getFinishRange();
ApplicationsRequestScope scope = request.getScope();
final Map<ApplicationId, RMApp> apps = rmContext.getRMApps();
@ -888,11 +888,11 @@ public void remove() {
continue;
}
if (start != null && !start.containsLong(application.getStartTime())) {
if (start != null && !start.contains(application.getStartTime())) {
continue;
}
if (finish != null && !finish.containsLong(application.getFinishTime())) {
if (finish != null && !finish.contains(application.getFinishTime())) {
continue;
}

View File

@ -19,7 +19,7 @@
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableSet;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceStability.Unstable;

View File

@ -125,7 +125,7 @@ public static ApplicationPlacementContext getPlacementContext(
public static ApplicationPlacementContext getPlacementContext(
QueueMappingEntity mapping, String leafQueueName) {
if (!org.apache.commons.lang.StringUtils.isEmpty(mapping.getParentQueue())) {
if (!org.apache.commons.lang3.StringUtils.isEmpty(mapping.getParentQueue())) {
return new ApplicationPlacementContext(leafQueueName,
mapping.getParentQueue());
} else{

View File

@ -22,7 +22,7 @@
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;

View File

@ -36,7 +36,7 @@
import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;

View File

@ -28,7 +28,7 @@
import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
import org.apache.commons.lang.time.DateUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;

View File

@ -33,9 +33,9 @@
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.time.DateUtils;
import org.apache.commons.lang.time.FastDateFormat;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.apache.commons.lang3.time.FastDateFormat;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;

View File

@ -26,7 +26,7 @@
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;

View File

@ -28,7 +28,7 @@
import java.util.concurrent.locks.ReentrantReadWriteLock;
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;

View File

@ -33,8 +33,8 @@
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.time.DateUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.LimitedPrivate;

View File

@ -23,7 +23,7 @@
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;

View File

@ -27,7 +27,7 @@
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;

View File

@ -26,7 +26,7 @@
import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.yarn.nodelabels.CommonNodeLabelsManager;
import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager;

View File

@ -22,7 +22,7 @@
import java.util.Iterator;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.yarn.api.records.Container;

View File

@ -22,7 +22,7 @@
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Maps;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.yarn.api.records.AllocationTagNamespaceType;

Some files were not shown because too many files have changed in this diff Show More