HADOOP-15495. Upgrade commons-lang version to 3.7 in hadoop-common-project and hadoop-tools. Contributed by Takanobu Asanuma.

Akira Ajisaka 2018-06-28 14:29:40 +09:00
parent 8752a48564
commit 2b2399d623
102 changed files with 207 additions and 168 deletions
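
Nearly every changed file follows the same mechanical migration: the build
drops commons-lang 2.6 in favor of commons-lang3 3.7, and imports move from
org.apache.commons.lang to org.apache.commons.lang3. A minimal sketch of the
rename (hypothetical class, for illustration only):

    // Before (commons-lang 2.6):
    // import org.apache.commons.lang.StringUtils;
    // After (commons-lang3 3.7), only the package changes for most utilities;
    // method names and semantics stay the same:
    import org.apache.commons.lang3.StringUtils;

    public class MigrationSketch {
      public static void main(String[] args) {
        System.out.println(StringUtils.isNotBlank(" hadoop ")); // true
      }
    }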

View File

@@ -166,10 +166,6 @@
         <groupId>commons-io</groupId>
         <artifactId>commons-io</artifactId>
       </exclusion>
-      <exclusion>
-        <groupId>commons-lang</groupId>
-        <artifactId>commons-lang</artifactId>
-      </exclusion>
       <exclusion>
         <groupId>commons-logging</groupId>
         <artifactId>commons-logging</artifactId>
@@ -495,10 +491,6 @@
         <groupId>commons-codec</groupId>
         <artifactId>commons-codec</artifactId>
       </exclusion>
-      <exclusion>
-        <groupId>commons-lang</groupId>
-        <artifactId>commons-lang</artifactId>
-      </exclusion>
       <exclusion>
         <groupId>commons-logging</groupId>
         <artifactId>commons-logging</artifactId>

View File

@@ -156,11 +156,6 @@
       <artifactId>junit</artifactId>
       <scope>test</scope>
     </dependency>
-    <dependency>
-      <groupId>commons-lang</groupId>
-      <artifactId>commons-lang</artifactId>
-      <scope>compile</scope>
-    </dependency>
     <dependency>
       <groupId>commons-beanutils</groupId>
       <artifactId>commons-beanutils</artifactId>

View File

@@ -18,7 +18,7 @@
 package org.apache.hadoop.conf;
-import org.apache.commons.lang.StringEscapeUtils;
+import org.apache.commons.lang3.StringEscapeUtils;
 import java.util.Collection;
 import java.util.Enumeration;
@@ -72,10 +72,10 @@ public class ReconfigurationServlet extends HttpServlet {
   private void printHeader(PrintWriter out, String nodeName) {
     out.print("<html><head>");
     out.printf("<title>%s Reconfiguration Utility</title>%n",
-        StringEscapeUtils.escapeHtml(nodeName));
+        StringEscapeUtils.escapeHtml4(nodeName));
     out.print("</head><body>\n");
     out.printf("<h1>%s Reconfiguration Utility</h1>%n",
-        StringEscapeUtils.escapeHtml(nodeName));
+        StringEscapeUtils.escapeHtml4(nodeName));
   }
   private void printFooter(PrintWriter out) {
@@ -103,20 +103,20 @@ public class ReconfigurationServlet extends HttpServlet {
       out.print("<tr><td>");
       if (!reconf.isPropertyReconfigurable(c.prop)) {
         out.print("<font color=\"red\">" +
-            StringEscapeUtils.escapeHtml(c.prop) + "</font>");
+            StringEscapeUtils.escapeHtml4(c.prop) + "</font>");
         changeOK = false;
       } else {
-        out.print(StringEscapeUtils.escapeHtml(c.prop));
+        out.print(StringEscapeUtils.escapeHtml4(c.prop));
         out.print("<input type=\"hidden\" name=\"" +
-            StringEscapeUtils.escapeHtml(c.prop) + "\" value=\"" +
-            StringEscapeUtils.escapeHtml(c.newVal) + "\"/>");
+            StringEscapeUtils.escapeHtml4(c.prop) + "\" value=\"" +
+            StringEscapeUtils.escapeHtml4(c.newVal) + "\"/>");
       }
       out.print("</td><td>" +
           (c.oldVal == null ? "<it>default</it>" :
-          StringEscapeUtils.escapeHtml(c.oldVal)) +
+          StringEscapeUtils.escapeHtml4(c.oldVal)) +
           "</td><td>" +
           (c.newVal == null ? "<it>default</it>" :
-          StringEscapeUtils.escapeHtml(c.newVal)) +
+          StringEscapeUtils.escapeHtml4(c.newVal)) +
           "</td>");
       out.print("</tr>\n");
     }
@@ -147,9 +147,9 @@ public class ReconfigurationServlet extends HttpServlet {
     synchronized(oldConf) {
       while (params.hasMoreElements()) {
         String rawParam = params.nextElement();
-        String param = StringEscapeUtils.unescapeHtml(rawParam);
+        String param = StringEscapeUtils.unescapeHtml4(rawParam);
         String value =
-            StringEscapeUtils.unescapeHtml(req.getParameter(rawParam));
+            StringEscapeUtils.unescapeHtml4(req.getParameter(rawParam));
         if (value != null) {
           if (value.equals(newConf.getRaw(param)) || value.equals("default") ||
               value.equals("null") || value.isEmpty()) {
@@ -157,8 +157,8 @@ public class ReconfigurationServlet extends HttpServlet {
               value.isEmpty()) &&
               oldConf.getRaw(param) != null) {
             out.println("<p>Changed \"" +
-                StringEscapeUtils.escapeHtml(param) + "\" from \"" +
-                StringEscapeUtils.escapeHtml(oldConf.getRaw(param)) +
+                StringEscapeUtils.escapeHtml4(param) + "\" from \"" +
+                StringEscapeUtils.escapeHtml4(oldConf.getRaw(param)) +
                 "\" to default</p>");
             reconf.reconfigureProperty(param, null);
           } else if (!value.equals("default") && !value.equals("null") &&
@@ -168,16 +168,16 @@ public class ReconfigurationServlet extends HttpServlet {
             // change from default or value to different value
             if (oldConf.getRaw(param) == null) {
               out.println("<p>Changed \"" +
-                  StringEscapeUtils.escapeHtml(param) +
+                  StringEscapeUtils.escapeHtml4(param) +
                   "\" from default to \"" +
-                  StringEscapeUtils.escapeHtml(value) + "\"</p>");
+                  StringEscapeUtils.escapeHtml4(value) + "\"</p>");
             } else {
               out.println("<p>Changed \"" +
-                  StringEscapeUtils.escapeHtml(param) + "\" from \"" +
-                  StringEscapeUtils.escapeHtml(oldConf.
-                      getRaw(param)) +
+                  StringEscapeUtils.escapeHtml4(param) + "\" from \"" +
+                  StringEscapeUtils.escapeHtml4(oldConf.
+                      getRaw(param)) +
                   "\" to \"" +
-                  StringEscapeUtils.escapeHtml(value) + "\"</p>");
+                  StringEscapeUtils.escapeHtml4(value) + "\"</p>");
             }
             reconf.reconfigureProperty(param, value);
           } else {
@@ -185,10 +185,10 @@ public class ReconfigurationServlet extends HttpServlet {
           }
         } else {
           // parameter value != newConf value
-          out.println("<p>\"" + StringEscapeUtils.escapeHtml(param) +
+          out.println("<p>\"" + StringEscapeUtils.escapeHtml4(param) +
               "\" not changed because value has changed from \"" +
-              StringEscapeUtils.escapeHtml(value) + "\" to \"" +
-              StringEscapeUtils.escapeHtml(newConf.getRaw(param)) +
+              StringEscapeUtils.escapeHtml4(value) + "\" to \"" +
+              StringEscapeUtils.escapeHtml4(newConf.getRaw(param)) +
               "\" since approval</p>");
         }
       }
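
In commons-lang3 the HTML helpers gained an explicit HTML 4.0 suffix:
escapeHtml/unescapeHtml became escapeHtml4/unescapeHtml4, with equivalent
escaping behavior for this servlet. A minimal sketch of the renamed calls:

    import org.apache.commons.lang3.StringEscapeUtils;

    public class EscapeDemo {
      public static void main(String[] args) {
        // lang 2.x escapeHtml/unescapeHtml -> lang3 escapeHtml4/unescapeHtml4
        String escaped = StringEscapeUtils.escapeHtml4("<b>R&D</b>");
        System.out.println(escaped); // &lt;b&gt;R&amp;D&lt;/b&gt;
        System.out.println(StringEscapeUtils.unescapeHtml4(escaped)); // <b>R&D</b>
      }
    }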

View File

@@ -33,8 +33,8 @@ import java.util.Map;
 import com.google.gson.stream.JsonReader;
 import com.google.gson.stream.JsonWriter;
-import org.apache.commons.lang.builder.EqualsBuilder;
-import org.apache.commons.lang.builder.HashCodeBuilder;
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;

View File

@@ -27,7 +27,7 @@ import java.util.Map;
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.key.KeyProvider.Metadata;

View File

@@ -23,7 +23,6 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.LinkedList;
-import org.apache.commons.lang.WordUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
@@ -275,7 +274,7 @@ public class FsShell extends Configured implements Tool {
         listing = null;
       }
-      for (String descLine : WordUtils.wrap(
+      for (String descLine : StringUtils.wrap(
           line, MAX_LINE_WIDTH, "\n", true).split("\n")) {
         out.println(prefix + descLine);
       }

View File

@@ -27,7 +27,7 @@ import java.net.URISyntaxException;
 import java.util.regex.Pattern;
 import org.apache.avro.reflect.Stringable;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;

View File

@@ -23,7 +23,7 @@ import java.util.Arrays;
 import java.util.LinkedList;
 import java.util.List;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;

View File

@@ -18,7 +18,7 @@
 package org.apache.hadoop.io;
 import com.google.common.collect.ComparisonChain;
-import org.apache.commons.lang.builder.HashCodeBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
 import java.nio.ByteBuffer;
 import java.util.Map;

View File

@@ -22,8 +22,8 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
-import org.apache.commons.lang.builder.EqualsBuilder;
-import org.apache.commons.lang.builder.HashCodeBuilder;
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;

View File

@@ -21,7 +21,7 @@ import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.FileDescriptor;
-import org.apache.commons.lang.SystemUtils;
+import org.apache.commons.lang3.SystemUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.slf4j.Logger;

View File

@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.ipc;
-import org.apache.commons.lang.builder.EqualsBuilder;
-import org.apache.commons.lang.builder.HashCodeBuilder;
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;

View File

@@ -39,7 +39,7 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectWriter;
 import com.google.common.base.Preconditions;
 import com.google.common.util.concurrent.AtomicDoubleArray;
-import org.apache.commons.lang.exception.ExceptionUtils;
+import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.metrics2.MetricsCollector;
@@ -429,7 +429,7 @@ public class DecayRpcScheduler implements RpcScheduler,
       updateAverageResponseTime(true);
     } catch (Exception ex) {
       LOG.error("decayCurrentCounts exception: " +
-          ExceptionUtils.getFullStackTrace(ex));
+          ExceptionUtils.getStackTrace(ex));
       throw ex;
     }
   }
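
commons-lang3 removed ExceptionUtils.getFullStackTrace; getStackTrace(ex)
returns the printStackTrace-style text, caused-by chain included, so the log
message keeps the same information. A minimal sketch:

    import org.apache.commons.lang3.exception.ExceptionUtils;

    public class StackTraceDemo {
      public static void main(String[] args) {
        Exception ex = new IllegalStateException(
            "decay failed", new ArithmeticException("/ by zero"));
        // getStackTrace(ex) yields what printStackTrace would print.
        System.err.println("decayCurrentCounts exception: "
            + ExceptionUtils.getStackTrace(ex));
      }
    }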

View File

@@ -32,7 +32,7 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.lang.NotImplementedException;
+import org.apache.commons.lang3.NotImplementedException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ipc.CallQueueManager.CallQueueOverflowException;
 import org.apache.hadoop.metrics2.util.MBeans;
@@ -286,7 +286,7 @@ public class FairCallQueue<E extends Schedulable> extends AbstractQueue<E>
   */
  @Override
  public Iterator<E> iterator() {
-    throw new NotImplementedException();
+    throw new NotImplementedException("Code is not implemented");
  }
  /**
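
commons-lang3's NotImplementedException drops the no-argument constructor
that lang 2.x had, so the call site now supplies a detail message. A sketch
of the changed idiom (hypothetical stub class):

    import java.util.Iterator;
    import org.apache.commons.lang3.NotImplementedException;

    public class IteratorStub<E> {
      public Iterator<E> iterator() {
        // lang 2.x allowed new NotImplementedException(); lang3 (as of 3.7)
        // requires a message or cause.
        throw new NotImplementedException("Code is not implemented");
      }
    }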

View File

@@ -18,7 +18,7 @@
 package org.apache.hadoop.metrics2;
-import org.apache.commons.lang.exception.ExceptionUtils;
+import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.codehaus.jackson.map.ObjectMapper;

View File

@@ -21,7 +21,7 @@ package org.apache.hadoop.metrics2.lib;
 import java.lang.reflect.Method;
 import static com.google.common.base.Preconditions.*;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.metrics2.MetricsException;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;

View File

@@ -21,7 +21,7 @@ package org.apache.hadoop.metrics2.lib;
 import java.lang.reflect.Field;
 import java.lang.reflect.Method;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics2.MetricsException;

View File

@@ -26,7 +26,7 @@ import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.ScheduledFuture;
 import java.util.concurrent.TimeUnit;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics2.MetricsInfo;

View File

@@ -32,7 +32,7 @@ import java.util.concurrent.TimeUnit;
 import java.util.function.Function;
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics2.MetricsInfo;

View File

@@ -18,7 +18,7 @@
 package org.apache.hadoop.metrics2.lib;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics2.MetricsInfo;

View File

@@ -37,7 +37,7 @@ import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 import org.apache.commons.configuration2.SubsetConfiguration;
-import org.apache.commons.lang.time.FastDateFormat;
+import org.apache.commons.lang3.time.FastDateFormat;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;

View File

@@ -28,7 +28,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;

View File

@@ -28,7 +28,7 @@ import java.nio.channels.ClosedChannelException;
 import java.nio.channels.ReadableByteChannel;
 import java.nio.ByteBuffer;
-import org.apache.commons.lang.SystemUtils;
+import org.apache.commons.lang3.SystemUtils;
 import org.apache.hadoop.util.NativeCodeLoader;
 import org.apache.hadoop.util.CloseableReferenceCount;

View File

@@ -32,7 +32,7 @@ import java.util.Map;
 import java.util.concurrent.locks.Condition;
 import java.util.concurrent.locks.ReentrantLock;
-import org.apache.commons.lang.SystemUtils;
+import org.apache.commons.lang3.SystemUtils;
 import org.apache.hadoop.util.NativeCodeLoader;
 import com.google.common.annotations.VisibleForTesting;

View File

@@ -25,7 +25,7 @@ import java.util.concurrent.TimeUnit;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Joiner;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;

View File

@@ -27,7 +27,7 @@ import java.util.List;
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.tools.CommandShell;

View File

@@ -34,7 +34,7 @@ import javax.servlet.ServletResponse;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import com.google.common.annotations.VisibleForTesting;
 import java.util.stream.Collectors;

View File

@@ -26,7 +26,7 @@ import java.util.ArrayList;
 import java.util.Date;
 import java.util.ServiceLoader;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;

View File

@@ -20,8 +20,7 @@ package org.apache.hadoop.tools;
 import java.util.ArrayList;
 import java.util.LinkedList;
-import org.apache.commons.lang.StringUtils;
-import org.apache.commons.lang.WordUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 /**
@@ -103,7 +102,8 @@ public class TableListing {
       // Line-wrap if it's too long
       String[] lines = new String[] {raw};
       if (wrap) {
-        lines = WordUtils.wrap(lines[0], wrapWidth, "\n", true).split("\n");
+        lines = org.apache.hadoop.util.StringUtils.wrap(lines[0], wrapWidth,
+            "\n", true).split("\n");
       }
       for (int i=0; i<lines.length; i++) {
         if (justification == Justification.LEFT) {

View File

@@ -35,7 +35,7 @@ import java.util.StringTokenizer;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
-import org.apache.commons.lang.SystemUtils;
+import org.apache.commons.lang3.SystemUtils;
 import org.apache.commons.lang3.time.FastDateFormat;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -987,7 +987,7 @@ public class StringUtils {
     String[] words = split(StringUtils.toLowerCase(s), ESCAPE_CHAR, '_');
     for (String word : words)
-      sb.append(org.apache.commons.lang.StringUtils.capitalize(word));
+      sb.append(org.apache.commons.lang3.StringUtils.capitalize(word));
     return sb.toString();
   }
@@ -1183,4 +1183,64 @@ public class StringUtils {
     return true;
   }
+
+  /**
+   * Same as WordUtils#wrap in commons-lang 2.6. Unlike commons-lang3, leading
+   * spaces on the first line are NOT stripped.
+   *
+   * @param str the String to be word wrapped, may be null
+   * @param wrapLength the column to wrap the words at, less than 1 is treated
+   *     as 1
+   * @param newLineStr the string to insert for a new line,
+   *     <code>null</code> uses the system property line separator
+   * @param wrapLongWords true if long words (such as URLs) should be wrapped
+   * @return a line with newlines inserted, <code>null</code> if null input
+   */
+  public static String wrap(String str, int wrapLength, String newLineStr,
+      boolean wrapLongWords) {
+    if(str == null) {
+      return null;
+    } else {
+      if(newLineStr == null) {
+        newLineStr = System.lineSeparator();
+      }
+      if(wrapLength < 1) {
+        wrapLength = 1;
+      }
+      int inputLineLength = str.length();
+      int offset = 0;
+      StringBuffer wrappedLine = new StringBuffer(inputLineLength + 32);
+      while(inputLineLength - offset > wrapLength) {
+        if(str.charAt(offset) == 32) {
+          ++offset;
+        } else {
+          int spaceToWrapAt = str.lastIndexOf(32, wrapLength + offset);
+          if(spaceToWrapAt >= offset) {
+            wrappedLine.append(str.substring(offset, spaceToWrapAt));
+            wrappedLine.append(newLineStr);
+            offset = spaceToWrapAt + 1;
+          } else if(wrapLongWords) {
+            wrappedLine.append(str.substring(offset, wrapLength + offset));
+            wrappedLine.append(newLineStr);
+            offset += wrapLength;
+          } else {
+            spaceToWrapAt = str.indexOf(32, wrapLength + offset);
+            if(spaceToWrapAt >= 0) {
+              wrappedLine.append(str.substring(offset, spaceToWrapAt));
+              wrappedLine.append(newLineStr);
+              offset = spaceToWrapAt + 1;
+            } else {
+              wrappedLine.append(str.substring(offset));
+              offset = inputLineLength;
+            }
+          }
+        }
+      }
+      wrappedLine.append(str.substring(offset));
+      return wrappedLine.toString();
+    }
+  }
 }
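
The new helper keeps the lang 2.6 wrapping semantics that FsShell and
TableListing depend on. A quick usage sketch of the added method:

    import org.apache.hadoop.util.StringUtils;

    public class WrapDemo {
      public static void main(String[] args) {
        // Wrap at column 10 with "\n"; true = also break words longer than
        // the wrap length (such as URLs).
        System.out.println(
            StringUtils.wrap("upgrade commons-lang in hadoop", 10, "\n", true));
      }
    }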

View File

@@ -62,7 +62,7 @@ import static org.apache.hadoop.conf.StorageUnit.TB;
 import static org.hamcrest.core.Is.is;
 import static org.junit.Assert.*;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration.IntegerRanges;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;

View File

@@ -18,7 +18,7 @@
 package org.apache.hadoop.conf;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.junit.Before;
 import org.junit.Ignore;
 import org.junit.Test;

View File

@@ -20,7 +20,7 @@ package org.apache.hadoop.crypto.random;
 import java.io.IOException;
 import java.util.Arrays;
-import org.apache.commons.lang.SystemUtils;
+import org.apache.commons.lang3.SystemUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.util.Shell.ShellCommandExecutor;
 import org.junit.Assume;

View File

@@ -19,7 +19,7 @@ package org.apache.hadoop.fs;
 import java.io.IOException;
-import org.apache.commons.lang.RandomStringUtils;
+import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.hadoop.fs.Options.CreateOpts;
 import org.apache.hadoop.test.GenericTestUtils;

View File

@@ -22,7 +22,6 @@ import java.io.IOException;
 import java.io.FileNotFoundException;
 import java.util.EnumSet;
-import org.apache.commons.lang.RandomStringUtils;
 import org.apache.hadoop.fs.Options.CreateOpts;
 import org.apache.hadoop.fs.Options.CreateOpts.BlockSize;
 import org.apache.hadoop.io.IOUtils;

View File

@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.fs;
-import org.apache.commons.lang.RandomStringUtils;
+import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.After;
 import org.junit.Before;

View File

@@ -18,7 +18,7 @@
 package org.apache.hadoop.fs;
-import org.apache.commons.lang.math.RandomUtils;
+import org.apache.commons.lang3.RandomUtils;
 import org.apache.hadoop.fs.StorageStatistics.LongStatistic;
 import org.junit.Before;
@@ -67,15 +67,15 @@ public class TestFileSystemStorageStatistics {
  @Before
  public void setup() {
-    statistics.incrementBytesRead(RandomUtils.nextInt(100));
-    statistics.incrementBytesWritten(RandomUtils.nextInt(100));
-    statistics.incrementLargeReadOps(RandomUtils.nextInt(100));
-    statistics.incrementWriteOps(RandomUtils.nextInt(100));
-    statistics.incrementBytesReadByDistance(0, RandomUtils.nextInt(100));
-    statistics.incrementBytesReadByDistance(1, RandomUtils.nextInt(100));
-    statistics.incrementBytesReadByDistance(3, RandomUtils.nextInt(100));
-    statistics.incrementBytesReadErasureCoded(RandomUtils.nextInt(100));
+    statistics.incrementBytesRead(RandomUtils.nextInt(0, 100));
+    statistics.incrementBytesWritten(RandomUtils.nextInt(0, 100));
+    statistics.incrementLargeReadOps(RandomUtils.nextInt(0, 100));
+    statistics.incrementWriteOps(RandomUtils.nextInt(0, 100));
+    statistics.incrementBytesReadByDistance(0, RandomUtils.nextInt(0, 100));
+    statistics.incrementBytesReadByDistance(1, RandomUtils.nextInt(0, 100));
+    statistics.incrementBytesReadByDistance(3, RandomUtils.nextInt(0, 100));
+    statistics.incrementBytesReadErasureCoded(RandomUtils.nextInt(0, 100));
  }

  @Test
View File

@@ -17,8 +17,8 @@
  */
 package org.apache.hadoop.fs.shell;
-import org.apache.commons.lang.RandomStringUtils;
-import org.apache.commons.lang.math.RandomUtils;
+import org.apache.commons.lang3.RandomStringUtils;
+import org.apache.commons.lang3.RandomUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileSystem;
@@ -56,11 +56,11 @@ public class TestCopyFromLocal {
     fs.mkdirs(toDirPath);
     int numTotalFiles = 0;
-    int numDirs = RandomUtils.nextInt(5);
+    int numDirs = RandomUtils.nextInt(0, 5);
     for (int dirCount = 0; dirCount < numDirs; ++dirCount) {
       Path subDirPath = new Path(fromDirPath, "subdir" + dirCount);
       fs.mkdirs(subDirPath);
-      int numFiles = RandomUtils.nextInt(10);
+      int numFiles = RandomUtils.nextInt(0, 10);
       for (int fileCount = 0; fileCount < numFiles; ++fileCount) {
         numTotalFiles++;
         Path subFile = new Path(subDirPath, "file" + fileCount);
@@ -115,7 +115,7 @@ public class TestCopyFromLocal {
     Path dir = new Path("dir" + RandomStringUtils.randomNumeric(4));
     int numFiles = TestCopyFromLocal.initialize(dir);
     int maxThreads = Runtime.getRuntime().availableProcessors() * 2;
-    int randThreads = RandomUtils.nextInt(maxThreads - 1) + 1;
+    int randThreads = RandomUtils.nextInt(0, maxThreads - 1) + 1;
     String numThreads = Integer.toString(randThreads);
     run(new TestMultiThreadedCopy(randThreads,
         randThreads == 1 ? 0 : numFiles), "-t", numThreads,

View File

@@ -26,7 +26,7 @@ import org.junit.Assert;
 import org.junit.Assume;
 import org.junit.Before;
 import org.junit.Test;
-import org.apache.commons.lang.SystemUtils;
+import org.apache.commons.lang3.SystemUtils;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.test.GenericTestUtils;

View File

@@ -20,7 +20,7 @@ package org.apache.hadoop.ipc;
 import com.google.protobuf.BlockingService;
 import com.google.protobuf.RpcController;
 import com.google.protobuf.ServiceException;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.ipc.metrics.RpcMetrics;

View File

@@ -19,7 +19,7 @@
 package org.apache.hadoop.ipc;
 import com.google.protobuf.ServiceException;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;

View File

@@ -38,7 +38,7 @@ import java.util.Enumeration;
 import java.util.List;
 import java.util.concurrent.TimeUnit;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.security.KerberosAuthException;

View File

@@ -43,7 +43,7 @@ import org.junit.Assume;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
-import org.apache.commons.lang.exception.ExceptionUtils;
+import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.net.unix.DomainSocket.DomainChannel;
 import org.apache.hadoop.test.GenericTestUtils;

View File

@@ -21,7 +21,7 @@ import static org.apache.hadoop.security.token.delegation.web.DelegationTokenAut
 import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.commons.lang.mutable.MutableBoolean;
+import org.apache.commons.lang3.mutable.MutableBoolean;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;

View File

@@ -18,7 +18,7 @@
 package org.apache.hadoop.service.launcher;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.service.Service;
 import org.apache.hadoop.service.ServiceOperations;

View File

@@ -40,7 +40,7 @@ import java.util.concurrent.atomic.AtomicInteger;
 import java.util.regex.Pattern;
 import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang.RandomStringUtils;
+import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.fs.FileUtil;
@@ -344,7 +344,7 @@ public abstract class GenericTestUtils {
       throw new AssertionError(E_NULL_THROWABLE_STRING, t);
     }
     if (expectedText != null && !msg.contains(expectedText)) {
-      String prefix = org.apache.commons.lang.StringUtils.isEmpty(message)
+      String prefix = org.apache.commons.lang3.StringUtils.isEmpty(message)
          ? "" : (message + ": ");
      throw new AssertionError(
          String.format("%s Expected to find '%s' %s: %s",

View File

@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.util;
-import org.apache.commons.lang.exception.ExceptionUtils;
+import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.slf4j.LoggerFactory;
 import org.junit.Assert;
 import org.junit.Test;

View File

@@ -18,7 +18,7 @@
 package org.apache.hadoop.util;
-import org.apache.commons.lang.SystemUtils;
+import org.apache.commons.lang3.SystemUtils;
 import org.junit.Assert;
 import org.junit.Assume;
 import org.junit.Test;

View File

@@ -332,7 +332,7 @@ class FSNamesystemLock {
  private static String getMetricName(String operationName, boolean isWrite) {
    return (isWrite ? WRITE_LOCK_METRIC_PREFIX : READ_LOCK_METRIC_PREFIX) +
-        org.apache.commons.lang.StringUtils.capitalize(operationName) +
+        org.apache.commons.lang3.StringUtils.capitalize(operationName) +
        LOCK_METRIC_SUFFIX;
  }
 }

View File

@@ -1043,11 +1043,6 @@
       <artifactId>junit</artifactId>
       <version>4.11</version>
     </dependency>
-    <dependency>
-      <groupId>commons-lang</groupId>
-      <artifactId>commons-lang</artifactId>
-      <version>2.6</version>
-    </dependency>
     <dependency>
       <groupId>commons-collections</groupId>
       <artifactId>commons-collections</artifactId>

View File

@@ -22,7 +22,7 @@ import com.aliyun.oss.common.auth.Credentials;
 import com.aliyun.oss.common.auth.CredentialsProvider;
 import com.aliyun.oss.common.auth.DefaultCredentials;
 import com.aliyun.oss.common.auth.InvalidCredentialsException;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import java.io.IOException;

View File

@@ -30,7 +30,7 @@ import java.util.concurrent.TimeUnit;
 import com.google.common.util.concurrent.ListeningExecutorService;
 import com.google.common.util.concurrent.MoreExecutors;
 import org.apache.commons.collections.CollectionUtils;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.CreateFlag;

View File

@@ -45,7 +45,7 @@ import com.aliyun.oss.model.UploadPartCopyResult;
 import com.aliyun.oss.model.UploadPartRequest;
 import com.aliyun.oss.model.UploadPartResult;
 import org.apache.commons.collections.CollectionUtils;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.FileStatus;

View File

@@ -23,7 +23,7 @@ import java.io.IOException;
 import com.aliyun.oss.common.auth.CredentialsProvider;
 import com.google.common.base.Preconditions;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.LocalDirAllocator;
 import org.apache.hadoop.security.ProviderUtils;

View File

@@ -18,7 +18,7 @@
 package org.apache.hadoop.fs.aliyun.oss;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.junit.internal.AssumptionViolatedException;

View File

@@ -23,7 +23,7 @@ import com.amazonaws.auth.AWSCredentials;
 import com.amazonaws.auth.AWSCredentialsProvider;
 import com.amazonaws.auth.AnonymousAWSCredentials;
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.IOUtils;

View File

@@ -21,7 +21,7 @@ package org.apache.hadoop.fs.s3a;
 import com.amazonaws.auth.AWSCredentialsProvider;
 import com.amazonaws.auth.BasicAWSCredentials;
 import com.amazonaws.auth.AWSCredentials;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;

View File

@@ -121,8 +121,8 @@ import static org.apache.hadoop.fs.s3a.Constants.*;
 import static org.apache.hadoop.fs.s3a.Invoker.*;
 import static org.apache.hadoop.fs.s3a.S3AUtils.*;
 import static org.apache.hadoop.fs.s3a.Statistic.*;
-import static org.apache.commons.lang.StringUtils.isNotBlank;
-import static org.apache.commons.lang.StringUtils.isNotEmpty;
+import static org.apache.commons.lang3.StringUtils.isNotBlank;
+import static org.apache.commons.lang3.StringUtils.isNotEmpty;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

View File

@@ -24,7 +24,7 @@ import com.amazonaws.services.s3.model.S3Object;
 import com.amazonaws.services.s3.model.S3ObjectInputStream;
 import com.amazonaws.services.s3.model.SSECustomerKey;
 import com.google.common.base.Preconditions;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.CanSetReadahead;

View File

@@ -35,7 +35,7 @@ import com.amazonaws.services.s3.model.S3ObjectSummary;
 import com.google.common.base.Preconditions;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;

View File

@@ -21,7 +21,7 @@ package org.apache.hadoop.fs.s3a;
 import com.amazonaws.auth.AWSCredentials;
 import com.amazonaws.auth.AWSCredentialsProvider;
 import com.amazonaws.auth.BasicAWSCredentials;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;

View File

@@ -21,7 +21,7 @@ package org.apache.hadoop.fs.s3a;
 import com.amazonaws.auth.AWSCredentialsProvider;
 import com.amazonaws.auth.BasicSessionCredentials;
 import com.amazonaws.auth.AWSCredentials;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import java.io.IOException;
 import java.net.URI;

View File

@@ -32,7 +32,7 @@ import com.google.common.annotations.VisibleForTesting;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;

View File

@@ -33,7 +33,7 @@ import java.util.Map;
 import com.amazonaws.services.s3.model.PartETag;
 import com.google.common.base.Preconditions;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.FileSystem;

View File

@@ -28,7 +28,7 @@ import java.util.Map;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.FileSystem;

View File

@@ -28,7 +28,7 @@ import com.google.common.base.Preconditions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.s3a.WriteOperationHelper;

View File

@@ -30,7 +30,7 @@ import com.google.common.cache.CacheBuilder;
 import com.google.common.collect.Sets;
 import com.google.common.util.concurrent.UncheckedExecutionException;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;

View File

@@ -26,7 +26,7 @@ import com.amazonaws.regions.Regions;
 import com.amazonaws.services.dynamodbv2.AmazonDynamoDB;
 import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClientBuilder;
 import com.google.common.base.Preconditions;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

View File

@@ -60,7 +60,7 @@ import com.google.common.base.Preconditions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;

View File

@@ -23,7 +23,7 @@ import com.google.common.base.Preconditions;
 import com.google.common.cache.Cache;
 import com.google.common.cache.CacheBuilder;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;

View File

@@ -39,7 +39,7 @@ import com.google.common.base.Preconditions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileStatus;

View File

@@ -18,7 +18,7 @@
 package org.apache.hadoop.fs.s3native;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -31,7 +31,7 @@ import java.net.URISyntaxException;
 import java.net.URLDecoder;
 import java.util.Objects;
-import static org.apache.commons.lang.StringUtils.equalsIgnoreCase;
+import static org.apache.commons.lang3.StringUtils.equalsIgnoreCase;
 /**
  * Class to aid logging in to S3 endpoints.

View File

@@ -22,8 +22,8 @@ import com.amazonaws.ClientConfiguration;
 import com.amazonaws.services.s3.AmazonS3;
 import com.amazonaws.services.s3.S3ClientOptions;
-import org.apache.commons.lang.StringUtils;
-import org.apache.commons.lang.reflect.FieldUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang3.reflect.FieldUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;

View File

@@ -20,7 +20,7 @@ package org.apache.hadoop.fs.s3a;
 import static org.apache.hadoop.fs.contract.ContractTestUtils.skip;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 /**

View File

@@ -20,7 +20,7 @@ package org.apache.hadoop.fs.s3a;
 import static org.apache.hadoop.fs.contract.ContractTestUtils.skip;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 /**

View File

@@ -18,7 +18,7 @@
 package org.apache.hadoop.fs.s3a;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;

View File

@@ -28,7 +28,7 @@ import org.junit.Assert;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;


@@ -42,7 +42,7 @@ import com.microsoft.azure.datalake.store.oauth2.RefreshTokenBasedTokenProvider;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;


@@ -25,7 +25,7 @@ import java.net.URISyntaxException;
 import com.microsoft.azure.datalake.store.oauth2.DeviceCodeTokenProvider;
 import com.microsoft.azure.datalake.store.oauth2.MsiTokenProvider;
-import org.apache.commons.lang.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.EqualsBuilder;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.adl.common.CustomMockTokenProvider;
 import org.apache.hadoop.fs.adl.oauth2.AzureADTokenProvider;
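EqualsBuilder keeps its fluent and reflection-based API across the 2.x to 3.x move, so only the package changes. A minimal sketch assuming a hypothetical value class named Token:

import org.apache.commons.lang3.builder.EqualsBuilder;

// Illustrative only; Token is a hypothetical value class.
public class EqualsBuilderDemo {
  static class Token {
    String clientId;
    String endpoint;
    Token(String clientId, String endpoint) {
      this.clientId = clientId;
      this.endpoint = endpoint;
    }
  }

  public static void main(String[] args) {
    Token a = new Token("id-1", "https://login.example");
    Token b = new Token("id-1", "https://login.example");
    // Compares every field via reflection; prints true here.
    System.out.println(EqualsBuilder.reflectionEquals(a, b));
  }
}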


@@ -41,7 +41,7 @@ import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;


@@ -44,7 +44,7 @@ import java.util.concurrent.atomic.AtomicReference;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.fs.FSExceptionMessages;
 import org.apache.commons.codec.binary.Base64;


@@ -20,7 +20,7 @@ package org.apache.hadoop.fs.azure;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;


@@ -48,7 +48,7 @@ import com.fasterxml.jackson.core.JsonParser;
 import com.fasterxml.jackson.databind.JsonMappingException;
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectReader;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;


@@ -36,7 +36,7 @@ import java.util.concurrent.TimeUnit;
 import org.apache.hadoop.fs.Syncable;
 import org.apache.hadoop.fs.azure.StorageInterface.CloudPageBlobWrapper;
-import org.apache.commons.lang.exception.ExceptionUtils;
+import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;


@@ -23,7 +23,7 @@ import com.fasterxml.jackson.databind.JsonMappingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.azure.security.Constants;
 import org.apache.hadoop.io.retry.RetryPolicy;


@@ -18,7 +18,7 @@
 package org.apache.hadoop.fs.azure;
-import org.apache.commons.lang.Validate;
+import org.apache.commons.lang3.Validate;
 import org.apache.hadoop.fs.azure.security.Constants;
 import org.apache.hadoop.fs.azure.security.SpnegoToken;
 import org.apache.hadoop.fs.azure.security.WasbDelegationTokenIdentifier;
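Validate also ports with just the package change; one useful 3.x difference is that Validate.notNull returns its argument, so checks can be inlined. A minimal sketch with invented values, not code from the patched class:

import org.apache.commons.lang3.Validate;

// Illustrative only; the values are invented.
public class ValidateDemo {
  public static void main(String[] args) {
    // In lang3, notNull returns the validated object.
    String container =
        Validate.notNull("wasb-container", "container must not be null");
    Validate.notEmpty(container, "container must not be empty");
    System.out.println(container);
  }
}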


@@ -29,7 +29,7 @@ import java.io.OutputStream;
 import java.net.HttpURLConnection;
 import java.util.Arrays;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.azure.AzureNativeFileSystemStore.TestHookOperationContext;


@@ -22,7 +22,7 @@ import java.io.IOException;
 import java.net.URI;
 import java.util.Arrays;
-import org.apache.commons.lang.RandomStringUtils;
+import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;


@@ -36,7 +36,7 @@ import java.util.TimeZone;
 import java.util.List;
 import org.apache.commons.codec.DecoderException;
 import org.apache.commons.codec.net.URLCodec;
-import org.apache.commons.lang.NotImplementedException;
+import org.apache.commons.lang3.NotImplementedException;
 import org.apache.http.client.utils.URIBuilder;
 import com.microsoft.azure.storage.AccessCondition;
@@ -339,7 +339,7 @@ public class MockStorageInterface extends StorageInterface {
     @Override
     public StorageUri getStorageUri() {
-      throw new NotImplementedException();
+      throw new NotImplementedException("Code is not implemented");
     }
   }
@@ -590,20 +590,20 @@ public class MockStorageInterface extends StorageInterface {
     @Override
     public void create(long length, BlobRequestOptions options,
        OperationContext opContext) throws StorageException {
-      throw new NotImplementedException();
+      throw new NotImplementedException("Code is not implemented");
     }
     @Override
     public void uploadPages(InputStream sourceStream, long offset, long length,
        BlobRequestOptions options, OperationContext opContext)
        throws StorageException, IOException {
-      throw new NotImplementedException();
+      throw new NotImplementedException("Code is not implemented");
     }
     @Override
     public ArrayList<PageRange> downloadPageRanges(BlobRequestOptions options,
        OperationContext opContext) throws StorageException {
-      throw new NotImplementedException();
+      throw new NotImplementedException("Code is not implemented");
     }
     @Override
@@ -622,7 +622,7 @@ public class MockStorageInterface extends StorageInterface {
     @Override
     public StorageUri getStorageUri() {
-      throw new NotImplementedException();
+      throw new NotImplementedException("Code is not implemented");
     }
     @Override
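The added message is not cosmetic: commons-lang3 dropped the no-argument NotImplementedException constructor that commons-lang 2.x provided, so every throw site must now supply a description. A minimal before/after sketch (MockBlob is a made-up stand-in for the mock classes above):

import org.apache.commons.lang3.NotImplementedException;

// Illustrative only; MockBlob is a made-up stand-in.
public class MockBlob {
  public long length() {
    // lang 2.x allowed: throw new NotImplementedException();
    // lang3 requires a message (or a cause):
    throw new NotImplementedException("Code is not implemented");
  }
}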


@@ -19,7 +19,7 @@
 package org.apache.hadoop.fs.azure;
 import com.microsoft.azure.storage.blob.BlockEntry;
-import org.apache.commons.lang.RandomStringUtils;
+import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.Path;


@@ -28,7 +28,7 @@ import org.junit.internal.AssumptionViolatedException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileContext;
 import org.apache.hadoop.fs.FileSystem;


@@ -20,7 +20,7 @@ package org.apache.hadoop.tools;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;


@@ -28,7 +28,7 @@ import org.apache.commons.cli.GnuParser;
 import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;


@@ -22,7 +22,7 @@ import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.util.EnumSet;
-import org.apache.commons.lang.exception.ExceptionUtils;
+import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
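ExceptionUtils keeps its static helpers under the new package. A hedged sketch of the root-cause inspection such tests typically perform; the exception chain here is invented:

import java.io.FileNotFoundException;

import org.apache.commons.lang3.exception.ExceptionUtils;

// Illustrative only; the exception chain is invented.
public class ExceptionUtilsDemo {
  public static void main(String[] args) {
    Exception root = new FileNotFoundException("part-00000");
    Exception wrapped = new RuntimeException("copy failed", root);
    // getRootCause walks the cause chain to the innermost throwable.
    System.out.println(ExceptionUtils.getRootCause(wrapped).getMessage());
  }
}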


@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.mapred.gridmix;
-import org.apache.commons.lang.time.FastDateFormat;
+import org.apache.commons.lang3.time.FastDateFormat;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
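FastDateFormat is the thread-safe alternative to java.text.SimpleDateFormat, and its factory API is the same in lang3. A minimal sketch with an arbitrary pattern:

import org.apache.commons.lang3.time.FastDateFormat;

// Illustrative only; the pattern is arbitrary.
public class FastDateFormatDemo {
  // A single shared instance is safe across threads,
  // unlike SimpleDateFormat.
  private static final FastDateFormat FORMAT =
      FastDateFormat.getInstance("yyyy-MM-dd HH:mm:ss");

  public static void main(String[] args) {
    System.out.println(FORMAT.format(System.currentTimeMillis()));
  }
}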


@@ -19,7 +19,7 @@ package org.apache.hadoop.mapred.gridmix;
 import java.io.IOException;
-import org.apache.commons.lang.time.FastDateFormat;
+import org.apache.commons.lang3.time.FastDateFormat;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -76,7 +76,7 @@ class ExecutionSummarizer implements StatListener<JobStats> {
     startTime = System.currentTimeMillis();
     // flatten the args string and store it
     commandLineArgs =
-      org.apache.commons.lang.StringUtils.join(args, ' ');
+      org.apache.commons.lang3.StringUtils.join(args, ' ');
   }
   /**
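The fully qualified join call behaves the same in lang3: StringUtils.join(Object[], char) concatenates with the separator and returns null for a null array. A sketch with an invented argument array:

// Illustrative only; the argument array is invented.
public class JoinDemo {
  public static void main(String[] args) {
    String[] gridmixArgs = {"-generate", "1g", "input", "trace.json"};
    String flat = org.apache.commons.lang3.StringUtils.join(gridmixArgs, ' ');
    System.out.println(flat);  // -generate 1g input trace.json
  }
}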


@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.mapred.gridmix;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;


@@ -21,7 +21,7 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.Random;
-import org.apache.commons.lang.RandomStringUtils;
+import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
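RandomStringUtils needs only the package change; tests use it to synthesize data. A minimal sketch with an arbitrary length:

import org.apache.commons.lang3.RandomStringUtils;

// Illustrative only; the length is arbitrary.
public class RandomDataDemo {
  public static void main(String[] args) {
    // 16 random letters and digits, e.g. a test payload.
    System.out.println(RandomStringUtils.randomAlphanumeric(16));
  }
}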


@@ -30,7 +30,6 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import org.apache.commons.lang.CharSet;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.resourceestimator.common.api.RecurrenceId;
 import org.apache.hadoop.resourceestimator.common.api.ResourceSkyline;


@@ -18,7 +18,7 @@
 package org.apache.hadoop.tools.rumen.anonymization;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 /**
  * Utility class to handle commonly performed tasks in a
Some files were not shown because too many files have changed in this diff.