MAPREDUCE-7098. Upgrade common-langs version to 3.7 in hadoop-mapreduce-project
Signed-off-by: Akira Ajisaka <aajisaka@apache.org>

parent 02c4b89f99
commit d1e2b80980
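
The hunks below are a mechanical migration from Apache Commons Lang 2.x (commons-lang) to Commons Lang 3 (commons-lang3, version 3.7). Most call sites only need the package rename; the visible API change is in StringEscapeUtils, where escapeJavaScript/escapeHtml were replaced by escapeEcmaScript/escapeHtml4. The sketch below is illustrative only (the EscapeDemo class and sample string are not part of this commit) and shows the Lang 3 method names that the web UI blocks in this diff switch to:

import org.apache.commons.lang3.StringEscapeUtils;

// Illustrative sketch, not part of the commit: the Lang 3 escape methods
// used throughout the TaskPage/TasksBlock/HsJobsBlock/HsTaskPage hunks.
public class EscapeDemo {
  public static void main(String[] args) {
    String status = "<b>running</b> \"50%\"";
    // Lang 2.x: StringEscapeUtils.escapeJavaScript(StringEscapeUtils.escapeHtml(status))
    // Lang 3.x equivalent: HTML-escape first, then escape for embedding in JS table data.
    String escaped = StringEscapeUtils.escapeEcmaScript(
        StringEscapeUtils.escapeHtml4(status));
    System.out.println(escaped);
  }
}
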
@@ -36,7 +36,7 @@ import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
 import org.apache.commons.lang.SystemUtils;
-import org.apache.commons.lang.time.FastDateFormat;
+import org.apache.commons.lang3.time.FastDateFormat;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.Path;

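For the FastDateFormat imports in the hunks around this point, only the package changes; the getInstance/format API is the same in Lang 3. A minimal usage sketch under that assumption (the pattern and class name below are hypothetical, not taken from this diff):

import org.apache.commons.lang3.time.FastDateFormat;

// Illustrative sketch, not part of the commit: lang3 FastDateFormat is a
// thread-safe formatter with the same factory/format calls as the Lang 2 class.
public class TimestampDemo {
  private static final FastDateFormat FORMAT =
      FastDateFormat.getInstance("d-MMM-yyyy HH:mm:ss");

  public static String format(long millis) {
    return FORMAT.format(millis);
  }
}
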
@@ -40,7 +40,7 @@ import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
 import java.util.regex.Pattern;
 
-import org.apache.commons.lang.time.FastDateFormat;
+import org.apache.commons.lang3.time.FastDateFormat;
 import org.apache.hadoop.test.UnitTestcaseTimeLimit;
 import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix;
 import org.junit.Test;

@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.mapreduce.v2.app.job.impl;
 
-import static org.apache.commons.lang.StringUtils.isEmpty;
+import static org.apache.commons.lang3.StringUtils.isEmpty;
 
 import java.io.IOException;
 import java.net.InetAddress;

@@ -25,7 +25,7 @@ import java.net.URLDecoder;
 
 import javax.servlet.http.HttpServletResponse;
 
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.JobACL;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;

@@ -27,7 +27,7 @@ import static org.apache.hadoop.yarn.webapp.view.JQueryUI.tableInit;
 import java.util.EnumSet;
 import java.util.Collection;
 
-import org.apache.commons.lang.StringEscapeUtils;
+import org.apache.commons.lang3.StringEscapeUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
@@ -134,8 +134,8 @@ public class TaskPage extends AppView {
       .append(getAttemptId(taskId, ta)).append("\",\"")
       .append(progress).append("\",\"")
       .append(ta.getState().toString()).append("\",\"")
-      .append(StringEscapeUtils.escapeJavaScript(
-          StringEscapeUtils.escapeHtml(ta.getStatus()))).append("\",\"")
+      .append(StringEscapeUtils.escapeEcmaScript(
+          StringEscapeUtils.escapeHtml4(ta.getStatus()))).append("\",\"")
 
       .append(nodeHttpAddr == null ? "N/A" :
           "<a class='nodelink' href='" + MRWebAppUtil.getYARNWebappScheme() + nodeHttpAddr + "'>"
@@ -151,8 +151,8 @@ public class TaskPage extends AppView {
       .append(ta.getStartTime()).append("\",\"")
       .append(ta.getFinishTime()).append("\",\"")
       .append(ta.getElapsedTime()).append("\",\"")
-      .append(StringEscapeUtils.escapeJavaScript(StringEscapeUtils.escapeHtml(
-          diag)));
+      .append(StringEscapeUtils.escapeEcmaScript(
+          StringEscapeUtils.escapeHtml4(diag)));
       if (enableUIActions) {
         attemptsTableData.append("\",\"");
         if (EnumSet.of(

@@ -24,7 +24,7 @@ import static org.apache.hadoop.yarn.util.StringHelper.join;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.C_PROGRESSBAR;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.C_PROGRESSBAR_VALUE;
 
-import org.apache.commons.lang.StringEscapeUtils;
+import org.apache.commons.lang3.StringEscapeUtils;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskInfo;
@@ -103,8 +103,8 @@ public class TasksBlock extends HtmlBlock {
       .append(join(pct, '%')).append("'> ").append("<div class='")
       .append(C_PROGRESSBAR_VALUE).append("' style='")
       .append(join("width:", pct, '%')).append("'> </div> </div>\",\"")
-      .append(StringEscapeUtils.escapeJavaScript(
-          StringEscapeUtils.escapeHtml(info.getStatus()))).append("\",\"")
+      .append(StringEscapeUtils.escapeEcmaScript(
+          StringEscapeUtils.escapeHtml4(info.getStatus()))).append("\",\"")
 
       .append(info.getState()).append("\",\"")
       .append(info.getStartTime()).append("\",\"")

@@ -22,7 +22,7 @@ import static org.mockito.Mockito.*;
 import java.io.IOException;
 import java.util.Iterator;
 
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.JobACL;

@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.mapreduce.checkpoint;
 
-import org.apache.commons.lang.RandomStringUtils;
+import org.apache.commons.lang3.RandomStringUtils;
 
 /**
  * Simple naming service that generates a random checkpoint name.

@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.mapreduce.jobhistory;
 
-import org.apache.commons.lang.time.FastDateFormat;
+import org.apache.commons.lang3.time.FastDateFormat;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.mapred.JobStatus;

@@ -23,7 +23,7 @@ import java.io.IOException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;

@@ -22,7 +22,7 @@ import java.io.IOException;
 import java.util.HashSet;
 import java.util.Set;
 
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;

@@ -31,7 +31,7 @@ import java.util.HashSet;
 import java.util.Arrays;
 
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.classification.InterfaceAudience.Private;

@@ -22,7 +22,7 @@ import static org.junit.Assert.*;
 import java.util.Arrays;
 import java.util.Collections;
 
-import org.apache.commons.lang.ArrayUtils;
+import org.apache.commons.lang3.ArrayUtils;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.NullWritable;
 

@@ -21,7 +21,7 @@ package org.apache.hadoop.mapreduce.v2.hs.webapp;
 import java.text.SimpleDateFormat;
 import java.util.Date;
 
-import org.apache.commons.lang.StringEscapeUtils;
+import org.apache.commons.lang3.StringEscapeUtils;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobInfo;
@@ -83,11 +83,11 @@ public class HsJobsBlock extends HtmlBlock {
       .append(dateFormat.format(new Date(job.getFinishTime()))).append("\",\"")
       .append("<a href='").append(url("job", job.getId())).append("'>")
       .append(job.getId()).append("</a>\",\"")
-      .append(StringEscapeUtils.escapeJavaScript(StringEscapeUtils.escapeHtml(
+      .append(StringEscapeUtils.escapeEcmaScript(StringEscapeUtils.escapeHtml4(
          job.getName()))).append("\",\"")
-      .append(StringEscapeUtils.escapeJavaScript(StringEscapeUtils.escapeHtml(
+      .append(StringEscapeUtils.escapeEcmaScript(StringEscapeUtils.escapeHtml4(
          job.getUserName()))).append("\",\"")
-      .append(StringEscapeUtils.escapeJavaScript(StringEscapeUtils.escapeHtml(
+      .append(StringEscapeUtils.escapeEcmaScript(StringEscapeUtils.escapeHtml4(
          job.getQueueName()))).append("\",\"")
       .append(job.getState()).append("\",\"")
       .append(String.valueOf(job.getMapsTotal())).append("\",\"")

@@ -29,7 +29,7 @@ import static org.apache.hadoop.yarn.webapp.view.JQueryUI.tableInit;
 
 import java.util.Collection;
 
-import org.apache.commons.lang.StringEscapeUtils;
+import org.apache.commons.lang3.StringEscapeUtils;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
@@ -147,8 +147,8 @@ public class HsTaskPage extends HsView {
       attemptsTableData.append("[\"")
           .append(getAttemptId(taskId, ta)).append("\",\"")
           .append(ta.getState()).append("\",\"")
-          .append(StringEscapeUtils.escapeJavaScript(
-              StringEscapeUtils.escapeHtml(ta.getStatus()))).append("\",\"")
+          .append(StringEscapeUtils.escapeEcmaScript(
+              StringEscapeUtils.escapeHtml4(ta.getStatus()))).append("\",\"")
 
           .append("<a class='nodelink' href='" + MRWebAppUtil.getYARNWebappScheme() + nodeHttpAddr + "'>")
           .append(nodeRackName + "/" + nodeHttpAddr + "</a>\",\"")
@@ -171,8 +171,8 @@ public class HsTaskPage extends HsView {
           .append(elapsedReduceTime).append("\",\"");
       }
       attemptsTableData.append(attemptElapsed).append("\",\"")
-          .append(StringEscapeUtils.escapeJavaScript(
-              StringEscapeUtils.escapeHtml(ta.getNote())))
+          .append(StringEscapeUtils.escapeEcmaScript(
+              StringEscapeUtils.escapeHtml4(ta.getNote())))
           .append("\"],\n");
       }
       //Remove the last comma and close off the array of arrays

@@ -22,7 +22,7 @@ import java.io.IOException;
 import java.security.PrivilegedAction;
 import java.util.HashMap;
 import java.util.Map;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.mapreduce.JobID;

@@ -28,7 +28,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.concurrent.atomic.AtomicBoolean;
 
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.ipc.RPC;

@@ -23,7 +23,7 @@ import java.net.InetSocketAddress;
 import java.util.ArrayList;
 import java.util.HashMap;
 
-import org.apache.commons.lang.NotImplementedException;
+import org.apache.commons.lang3.NotImplementedException;
 import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.CancelDelegationTokenRequest;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.CancelDelegationTokenResponse;
@@ -167,7 +167,7 @@ public class NotRunningJob implements MRClientProtocol {
   public GetTaskAttemptReportResponse getTaskAttemptReport(
       GetTaskAttemptReportRequest request) throws IOException {
     //not invoked by anybody
-    throw new NotImplementedException();
+    throw new NotImplementedException("Code is not implemented");
   }
 
   @Override
@@ -222,26 +222,26 @@ public class NotRunningJob implements MRClientProtocol {
   public GetDelegationTokenResponse getDelegationToken(
       GetDelegationTokenRequest request) throws IOException {
     /* Should not be invoked by anyone. */
-    throw new NotImplementedException();
+    throw new NotImplementedException("Code is not implemented");
   }
 
   @Override
   public RenewDelegationTokenResponse renewDelegationToken(
       RenewDelegationTokenRequest request) throws IOException {
     /* Should not be invoked by anyone. */
-    throw new NotImplementedException();
+    throw new NotImplementedException("Code is not implemented");
   }
 
   @Override
   public CancelDelegationTokenResponse cancelDelegationToken(
       CancelDelegationTokenRequest request) throws IOException {
     /* Should not be invoked by anyone. */
-    throw new NotImplementedException();
+    throw new NotImplementedException("Code is not implemented");
   }
 
   @Override
   public InetSocketAddress getConnectAddress() {
     /* Should not be invoked by anyone. Normally used to set token service */
-    throw new NotImplementedException();
+    throw new NotImplementedException("Code is not implemented");
   }
 }

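The NotRunningJob hunks above also adapt to a constructor difference: org.apache.commons.lang3.NotImplementedException has no zero-argument constructor, so every throw site now passes a message. A minimal sketch of the pattern (the StubService class below is hypothetical, not from this commit):

import org.apache.commons.lang3.NotImplementedException;

// Illustrative sketch, not part of the commit: Lang 3 requires a message
// (or cause) when constructing NotImplementedException.
public class StubService {
  public String lookup(String key) {
    // Lang 2.x allowed: throw new NotImplementedException();
    throw new NotImplementedException("Code is not implemented");
  }
}
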
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.mapred;
 
-import static org.apache.commons.lang.StringUtils.isEmpty;
+import static org.apache.commons.lang3.StringUtils.isEmpty;
 import static org.apache.hadoop.mapreduce.MRJobConfig.MR_AM_RESOURCE_PREFIX;
 
 import java.io.IOException;

@@ -29,7 +29,7 @@ import java.util.Iterator;
 import java.util.List;
 import java.util.StringTokenizer;
 
-import org.apache.commons.lang.RandomStringUtils;
+import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;

@@ -20,7 +20,7 @@ package org.apache.hadoop.mapreduce.lib.db;
 import java.io.IOException;
 import java.lang.reflect.Field;
 
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.mapreduce.Job;

@@ -142,11 +142,6 @@
       <artifactId>commons-cli</artifactId>
       <scope>provided</scope>
     </dependency>
-    <dependency>
-      <groupId>commons-lang</groupId>
-      <artifactId>commons-lang</artifactId>
-      <scope>provided</scope>
-    </dependency>
     <dependency>
       <groupId>commons-collections</groupId>
       <artifactId>commons-collections</artifactId>