HADOOP-15531. Use commons-text instead of commons-lang in some classes to fix deprecation warnings. Contributed by Takanobu Asanuma.
parent 17118f446c
commit 88625f5cd9
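The sketch below is illustrative only and not part of the commit: it shows the commons-lang3-to-commons-text substitutions the hunks that follow apply (StrBuilder becomes TextStringBuilder; StringEscapeUtils and WordUtils move to the org.apache.commons.text package), using hypothetical class and variable names. commons-text 1.x keeps these APIs largely unchanged, so the migration is mostly an import swap.

// Illustrative only: hypothetical class/variable names, not code from this commit.
import org.apache.commons.text.StringEscapeUtils;  // was org.apache.commons.lang3.StringEscapeUtils
import org.apache.commons.text.TextStringBuilder;  // was org.apache.commons.lang3.text.StrBuilder
import org.apache.commons.text.WordUtils;          // was org.apache.commons.lang3.text.WordUtils

public class CommonsTextMigrationSketch {
  public static void main(String[] args) {
    // TextStringBuilder keeps StrBuilder's API, e.g. appendln(String).
    TextStringBuilder report = new TextStringBuilder();
    report.appendln(WordUtils.capitalize("disk balancer report"));
    // The escapeHtml4/escapeJava/escapeEcmaScript methods keep their names.
    report.appendln(StringEscapeUtils.escapeHtml4("usage < 80% && healthy"));
    System.out.println(report.toString());
  }
}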
@@ -170,6 +170,10 @@
           <groupId>org.apache.commons</groupId>
           <artifactId>commons-lang3</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>org.apache.commons</groupId>
+          <artifactId>commons-text</artifactId>
+        </exclusion>
         <exclusion>
           <groupId>commons-logging</groupId>
           <artifactId>commons-logging</artifactId>
@@ -171,6 +171,11 @@
       <artifactId>commons-lang3</artifactId>
       <scope>compile</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-text</artifactId>
+      <scope>compile</scope>
+    </dependency>
     <dependency>
       <groupId>org.slf4j</groupId>
       <artifactId>slf4j-api</artifactId>
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.conf;
 
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 
 import java.util.Collection;
 import java.util.Enumeration;
@@ -31,7 +31,7 @@ import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -27,7 +27,7 @@ import com.google.common.collect.Lists;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.Option;
 import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.lang3.text.StrBuilder;
+import org.apache.commons.text.TextStringBuilder;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
@@ -491,7 +491,7 @@ public abstract class Command extends Configured implements Closeable {
   /**
    * Put output line to log and string buffer.
    * */
-  protected void recordOutput(final StrBuilder result,
+  protected void recordOutput(final TextStringBuilder result,
       final String outputLine) {
     LOG.info(outputLine);
     result.appendln(outputLine);
@@ -501,7 +501,7 @@ public abstract class Command extends Configured implements Closeable {
   * Parse top number of nodes to be processed.
   * @return top number of nodes to be processed.
   */
-  protected int parseTopNodes(final CommandLine cmd, final StrBuilder result)
+  protected int parseTopNodes(final CommandLine cmd, final TextStringBuilder result)
       throws IllegalArgumentException {
     String outputLine = "";
     int nodes = 0;
@@ -23,7 +23,7 @@ import com.google.common.base.Throwables;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.lang3.text.StrBuilder;
+import org.apache.commons.text.TextStringBuilder;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.Path;
@@ -89,7 +89,7 @@ public class PlanCommand extends Command {
   */
  @Override
  public void execute(CommandLine cmd) throws Exception {
-    StrBuilder result = new StrBuilder();
+    TextStringBuilder result = new TextStringBuilder();
    String outputLine = "";
    LOG.debug("Processing Plan Command.");
    Preconditions.checkState(cmd.hasOption(DiskBalancerCLI.PLAN));
@@ -25,7 +25,7 @@ import java.util.ListIterator;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.lang3.text.StrBuilder;
+import org.apache.commons.text.TextStringBuilder;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.server.diskbalancer.DiskBalancerException;
 import org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerDataNode;
@@ -67,7 +67,7 @@ public class ReportCommand extends Command {
 
   @Override
   public void execute(CommandLine cmd) throws Exception {
-    StrBuilder result = new StrBuilder();
+    TextStringBuilder result = new TextStringBuilder();
     String outputLine = "Processing report command";
     recordOutput(result, outputLine);
 
@@ -99,7 +99,7 @@ public class ReportCommand extends Command {
     getPrintStream().println(result.toString());
   }
 
-  private void handleTopReport(final CommandLine cmd, final StrBuilder result,
+  private void handleTopReport(final CommandLine cmd, final TextStringBuilder result,
       final String nodeFormat) throws IllegalArgumentException {
     Collections.sort(getCluster().getNodes(), Collections.reverseOrder());
 
@@ -131,7 +131,7 @@ public class ReportCommand extends Command {
     }
   }
 
-  private void handleNodeReport(final CommandLine cmd, StrBuilder result,
+  private void handleNodeReport(final CommandLine cmd, TextStringBuilder result,
       final String nodeFormat, final String volumeFormat) throws Exception {
     String outputLine = "";
     /*
@@ -175,7 +175,7 @@ public class ReportCommand extends Command {
   /**
    * Put node report lines to string buffer.
    */
-  private void recordNodeReport(StrBuilder result, DiskBalancerDataNode dbdn,
+  private void recordNodeReport(TextStringBuilder result, DiskBalancerDataNode dbdn,
       final String nodeFormat, final String volumeFormat) throws Exception {
     final String trueStr = "True";
     final String falseStr = "False";
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;
 
-import static org.apache.commons.lang3.StringEscapeUtils.escapeJava;
+import static org.apache.commons.text.StringEscapeUtils.escapeJava;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_DEFAULT;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_KEY;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_CALLER_CONTEXT_ENABLED_DEFAULT;
@@ -22,7 +22,7 @@ import java.util.EnumSet;
 import java.util.LinkedList;
 import java.util.List;
 
-import org.apache.commons.lang3.text.WordUtils;
+import org.apache.commons.text.WordUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
@@ -38,7 +38,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
 
 import com.google.common.base.Supplier;
 import com.google.common.collect.Lists;
-import org.apache.commons.lang3.text.StrBuilder;
+import org.apache.commons.text.TextStringBuilder;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -661,7 +661,7 @@ public class TestDecommission extends AdminStatesBaseTest {
   }
 
   private static String scanIntoString(final ByteArrayOutputStream baos) {
-    final StrBuilder sb = new StrBuilder();
+    final TextStringBuilder sb = new TextStringBuilder();
     final Scanner scanner = new Scanner(baos.toString());
     while (scanner.hasNextLine()) {
       sb.appendln(scanner.nextLine());
@@ -27,7 +27,7 @@ import com.google.common.base.Supplier;
 import com.google.common.collect.Lists;
 
 import org.apache.commons.io.FileUtils;
-import org.apache.commons.lang3.text.StrBuilder;
+import org.apache.commons.text.TextStringBuilder;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -518,7 +518,7 @@ public class TestDFSAdmin {
   }
 
   private static String scanIntoString(final ByteArrayOutputStream baos) {
-    final StrBuilder sb = new StrBuilder();
+    final TextStringBuilder sb = new TextStringBuilder();
     final Scanner scanner = new Scanner(baos.toString());
     while (scanner.hasNextLine()) {
       sb.appendln(scanner.nextLine());
@@ -27,7 +27,7 @@ import static org.apache.hadoop.yarn.webapp.view.JQueryUI.tableInit;
 import java.util.EnumSet;
 import java.util.Collection;
 
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
@@ -24,7 +24,7 @@ import static org.apache.hadoop.yarn.util.StringHelper.join;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.C_PROGRESSBAR;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.C_PROGRESSBAR_VALUE;
 
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskInfo;
@@ -21,7 +21,7 @@ package org.apache.hadoop.mapreduce.v2.hs.webapp;
 import java.text.SimpleDateFormat;
 import java.util.Date;
 
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
@@ -29,7 +29,7 @@ import static org.apache.hadoop.yarn.webapp.view.JQueryUI.tableInit;
 
 import java.util.Collection;
 
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
@@ -1069,6 +1069,11 @@
         <artifactId>commons-lang3</artifactId>
         <version>3.7</version>
       </dependency>
+      <dependency>
+        <groupId>org.apache.commons</groupId>
+        <artifactId>commons-text</artifactId>
+        <version>1.4</version>
+      </dependency>
       <dependency>
         <groupId>org.slf4j</groupId>
         <artifactId>slf4j-api</artifactId>
@@ -867,7 +867,8 @@ public class TopCLI extends YarnCLI {
         TimeUnit.MILLISECONDS.toMinutes(uptime)
             - TimeUnit.HOURS.toMinutes(TimeUnit.MILLISECONDS.toHours(uptime));
     String uptimeStr = String.format("%dd, %d:%d", days, hours, minutes);
-    String currentTime = DateFormatUtils.ISO_TIME_NO_T_FORMAT.format(now);
+    String currentTime = DateFormatUtils.ISO_8601_EXTENDED_TIME_FORMAT
+        .format(now);
 
     ret.append(CLEAR_LINE);
     ret.append(limitLineLength(String.format(
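An aside on the TopCLI hunk above: ISO_8601_EXTENDED_TIME_FORMAT is the non-deprecated commons-lang3 name for the old ISO_TIME_NO_T_FORMAT constant (the "HH:mm:ss" pattern), so the formatted output should be unchanged. A minimal sketch with a hypothetical class name, not code from this commit:

import java.util.Date;
import org.apache.commons.lang3.time.DateFormatUtils;

public class UptimeTimestampSketch {
  public static void main(String[] args) {
    Date now = new Date();
    // Same "HH:mm:ss" pattern the deprecated ISO_TIME_NO_T_FORMAT used.
    String currentTime = DateFormatUtils.ISO_8601_EXTENDED_TIME_FORMAT.format(now);
    System.out.println(currentTime);  // e.g. 14:07:33
  }
}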
@@ -26,7 +26,7 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 
 @Private
@@ -28,7 +28,7 @@ import java.util.EnumSet;
 import static java.util.EnumSet.*;
 import java.util.Iterator;
 
-import static org.apache.commons.lang3.StringEscapeUtils.*;
+import static org.apache.commons.text.StringEscapeUtils.*;
 import static org.apache.hadoop.yarn.webapp.hamlet.HamletImpl.EOpt.*;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -28,7 +28,7 @@ import java.util.EnumSet;
 import static java.util.EnumSet.*;
 import java.util.Iterator;
 
-import static org.apache.commons.lang3.StringEscapeUtils.*;
+import static org.apache.commons.text.StringEscapeUtils.*;
 import static org.apache.hadoop.yarn.webapp.hamlet2.HamletImpl.EOpt.*;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.yarn.webapp.view;
 
-import static org.apache.commons.lang3.StringEscapeUtils.escapeEcmaScript;
+import static org.apache.commons.text.StringEscapeUtils.escapeEcmaScript;
 import static org.apache.hadoop.yarn.util.StringHelper.djoin;
 import static org.apache.hadoop.yarn.util.StringHelper.join;
 import static org.apache.hadoop.yarn.util.StringHelper.split;
@@ -20,7 +20,7 @@ package org.apache.hadoop.yarn.webapp.view;
 
 import java.io.PrintWriter;
 
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.yarn.webapp.View;
 
@@ -25,7 +25,7 @@ import java.security.PrivilegedExceptionAction;
 import java.util.Collection;
 import java.util.List;
 
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.api.ApplicationBaseProtocol;
 import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportRequest;
@@ -28,7 +28,7 @@ import java.util.Collection;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -32,7 +32,7 @@ import java.util.Collection;
 import java.util.EnumSet;
 import java.util.List;
 
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.commons.lang3.Range;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.StringUtils;
@@ -29,7 +29,7 @@ import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
 
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
@@ -27,7 +27,7 @@ import java.io.IOException;
 import java.util.Collection;
 import java.util.List;
 
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportRequest;
@@ -25,7 +25,7 @@ import java.util.Collection;
 import java.util.List;
 import java.util.Set;
 
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsRequest;
@@ -26,7 +26,7 @@ import java.io.IOException;
 import java.util.List;
 import java.util.Set;
 
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.yarn.server.router.webapp;
 
-import static org.apache.commons.lang3.StringEscapeUtils.escapeHtml4;
-import static org.apache.commons.lang3.StringEscapeUtils.escapeEcmaScript;
+import static org.apache.commons.text.StringEscapeUtils.escapeHtml4;
+import static org.apache.commons.text.StringEscapeUtils.escapeEcmaScript;
 import static org.apache.hadoop.yarn.util.StringHelper.join;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.C_PROGRESSBAR;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.C_PROGRESSBAR_VALUE;