HADOOP-15495. Upgrade commons-lang version to 3.7 in hadoop-common-project and hadoop-tools. Contributed by Takanobu Asanuma.

Akira Ajisaka 2018-06-28 14:29:40 +09:00
parent 8752a48564
commit 2b2399d623
No known key found for this signature in database
GPG Key ID: C1EDBB9CA400FD50
102 changed files with 207 additions and 168 deletions
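
Beyond the package rename from org.apache.commons.lang to org.apache.commons.lang3, a handful of APIs changed shape between commons-lang 2.6 and commons-lang3 3.7, and those changes account for nearly every non-mechanical hunk below; the removal of WordUtils is handled separately with a local copy of wrap() in org.apache.hadoop.util.StringUtils. The summary class that follows is illustrative only and not part of the commit; the library methods it calls are the lang3 3.7 ones used throughout this diff.

import org.apache.commons.lang3.NotImplementedException;
import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;

public class Lang3MigrationNotes {
  static String examples(Throwable t) {
    // HTML escaping gained an HTML-version suffix: escapeHtml -> escapeHtml4.
    String safe = StringEscapeUtils.escapeHtml4("<b>node</b>");
    // getFullStackTrace was dropped; getStackTrace is the lang3 replacement.
    String trace = ExceptionUtils.getStackTrace(t);
    // nextInt(bound) became an explicit (startInclusive, endExclusive) range.
    int r = RandomUtils.nextInt(0, 100);
    // The no-argument constructor no longer exists; a message is required.
    NotImplementedException nie =
        new NotImplementedException("Code is not implemented");
    return safe + trace + r + nie.getMessage();
  }
}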

View File

@@ -166,10 +166,6 @@
 <groupId>commons-io</groupId>
 <artifactId>commons-io</artifactId>
 </exclusion>
-<exclusion>
-<groupId>commons-lang</groupId>
-<artifactId>commons-lang</artifactId>
-</exclusion>
 <exclusion>
 <groupId>commons-logging</groupId>
 <artifactId>commons-logging</artifactId>
@@ -495,10 +491,6 @@
 <groupId>commons-codec</groupId>
 <artifactId>commons-codec</artifactId>
 </exclusion>
-<exclusion>
-<groupId>commons-lang</groupId>
-<artifactId>commons-lang</artifactId>
-</exclusion>
 <exclusion>
 <groupId>commons-logging</groupId>
 <artifactId>commons-logging</artifactId>

View File

@@ -156,11 +156,6 @@
 <artifactId>junit</artifactId>
 <scope>test</scope>
 </dependency>
-<dependency>
-<groupId>commons-lang</groupId>
-<artifactId>commons-lang</artifactId>
-<scope>compile</scope>
-</dependency>
 <dependency>
 <groupId>commons-beanutils</groupId>
 <artifactId>commons-beanutils</artifactId>

View File

@@ -18,7 +18,7 @@
 package org.apache.hadoop.conf;
-import org.apache.commons.lang.StringEscapeUtils;
+import org.apache.commons.lang3.StringEscapeUtils;
 import java.util.Collection;
 import java.util.Enumeration;
@@ -72,10 +72,10 @@ private Reconfigurable getReconfigurable(HttpServletRequest req) {
 private void printHeader(PrintWriter out, String nodeName) {
 out.print("<html><head>");
 out.printf("<title>%s Reconfiguration Utility</title>%n",
-StringEscapeUtils.escapeHtml(nodeName));
+StringEscapeUtils.escapeHtml4(nodeName));
 out.print("</head><body>\n");
 out.printf("<h1>%s Reconfiguration Utility</h1>%n",
-StringEscapeUtils.escapeHtml(nodeName));
+StringEscapeUtils.escapeHtml4(nodeName));
 }
 private void printFooter(PrintWriter out) {
@@ -103,20 +103,20 @@ private void printConf(PrintWriter out, Reconfigurable reconf) {
 out.print("<tr><td>");
 if (!reconf.isPropertyReconfigurable(c.prop)) {
 out.print("<font color=\"red\">" +
-StringEscapeUtils.escapeHtml(c.prop) + "</font>");
+StringEscapeUtils.escapeHtml4(c.prop) + "</font>");
 changeOK = false;
 } else {
-out.print(StringEscapeUtils.escapeHtml(c.prop));
+out.print(StringEscapeUtils.escapeHtml4(c.prop));
 out.print("<input type=\"hidden\" name=\"" +
-StringEscapeUtils.escapeHtml(c.prop) + "\" value=\"" +
-StringEscapeUtils.escapeHtml(c.newVal) + "\"/>");
+StringEscapeUtils.escapeHtml4(c.prop) + "\" value=\"" +
+StringEscapeUtils.escapeHtml4(c.newVal) + "\"/>");
 }
 out.print("</td><td>" +
 (c.oldVal == null ? "<it>default</it>" :
-StringEscapeUtils.escapeHtml(c.oldVal)) +
+StringEscapeUtils.escapeHtml4(c.oldVal)) +
 "</td><td>" +
 (c.newVal == null ? "<it>default</it>" :
-StringEscapeUtils.escapeHtml(c.newVal)) +
+StringEscapeUtils.escapeHtml4(c.newVal)) +
 "</td>");
 out.print("</tr>\n");
 }
@@ -147,9 +147,9 @@ private void applyChanges(PrintWriter out, Reconfigurable reconf,
 synchronized(oldConf) {
 while (params.hasMoreElements()) {
 String rawParam = params.nextElement();
-String param = StringEscapeUtils.unescapeHtml(rawParam);
+String param = StringEscapeUtils.unescapeHtml4(rawParam);
 String value =
-StringEscapeUtils.unescapeHtml(req.getParameter(rawParam));
+StringEscapeUtils.unescapeHtml4(req.getParameter(rawParam));
 if (value != null) {
 if (value.equals(newConf.getRaw(param)) || value.equals("default") ||
 value.equals("null") || value.isEmpty()) {
@@ -157,8 +157,8 @@ private void applyChanges(PrintWriter out, Reconfigurable reconf,
 value.isEmpty()) &&
 oldConf.getRaw(param) != null) {
 out.println("<p>Changed \"" +
-StringEscapeUtils.escapeHtml(param) + "\" from \"" +
-StringEscapeUtils.escapeHtml(oldConf.getRaw(param)) +
+StringEscapeUtils.escapeHtml4(param) + "\" from \"" +
+StringEscapeUtils.escapeHtml4(oldConf.getRaw(param)) +
 "\" to default</p>");
 reconf.reconfigureProperty(param, null);
 } else if (!value.equals("default") && !value.equals("null") &&
@@ -168,16 +168,16 @@ private void applyChanges(PrintWriter out, Reconfigurable reconf,
 // change from default or value to different value
 if (oldConf.getRaw(param) == null) {
 out.println("<p>Changed \"" +
-StringEscapeUtils.escapeHtml(param) +
+StringEscapeUtils.escapeHtml4(param) +
 "\" from default to \"" +
-StringEscapeUtils.escapeHtml(value) + "\"</p>");
+StringEscapeUtils.escapeHtml4(value) + "\"</p>");
 } else {
 out.println("<p>Changed \"" +
-StringEscapeUtils.escapeHtml(param) + "\" from \"" +
-StringEscapeUtils.escapeHtml(oldConf.
+StringEscapeUtils.escapeHtml4(param) + "\" from \"" +
+StringEscapeUtils.escapeHtml4(oldConf.
 getRaw(param)) +
 "\" to \"" +
-StringEscapeUtils.escapeHtml(value) + "\"</p>");
+StringEscapeUtils.escapeHtml4(value) + "\"</p>");
 }
 reconf.reconfigureProperty(param, value);
 } else {
@@ -185,10 +185,10 @@ private void applyChanges(PrintWriter out, Reconfigurable reconf,
 }
 } else {
 // parameter value != newConf value
-out.println("<p>\"" + StringEscapeUtils.escapeHtml(param) +
+out.println("<p>\"" + StringEscapeUtils.escapeHtml4(param) +
 "\" not changed because value has changed from \"" +
-StringEscapeUtils.escapeHtml(value) + "\" to \"" +
-StringEscapeUtils.escapeHtml(newConf.getRaw(param)) +
+StringEscapeUtils.escapeHtml4(value) + "\" to \"" +
+StringEscapeUtils.escapeHtml4(newConf.getRaw(param)) +
 "\" since approval</p>");
 }
 }
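
The escapeHtml/unescapeHtml calls replaced above map one-to-one onto lang3's escapeHtml4/unescapeHtml4, which target HTML 4 entities. A minimal round-trip sketch, assuming commons-lang3 3.7 on the classpath (not part of the commit):

import org.apache.commons.lang3.StringEscapeUtils;

public class EscapeHtml4Demo {
  public static void main(String[] args) {
    String escaped = StringEscapeUtils.escapeHtml4("a < b & \"c\"");
    System.out.println(escaped); // a &lt; b &amp; &quot;c&quot;
    System.out.println(StringEscapeUtils.unescapeHtml4(escaped)); // a < b & "c"
  }
}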

View File

@@ -33,8 +33,8 @@
 import com.google.gson.stream.JsonReader;
 import com.google.gson.stream.JsonWriter;
-import org.apache.commons.lang.builder.EqualsBuilder;
-import org.apache.commons.lang.builder.HashCodeBuilder;
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;

View File

@@ -27,7 +27,7 @@
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.key.KeyProvider.Metadata;
View File

@@ -23,7 +23,6 @@
 import java.util.Arrays;
 import java.util.LinkedList;
-import org.apache.commons.lang.WordUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
@@ -275,7 +274,7 @@ private void printInstanceHelp(PrintStream out, Command instance) {
 listing = null;
 }
-for (String descLine : WordUtils.wrap(
+for (String descLine : StringUtils.wrap(
 line, MAX_LINE_WIDTH, "\n", true).split("\n")) {
 out.println(prefix + descLine);
 }

View File

@@ -27,7 +27,7 @@
 import java.util.regex.Pattern;
 import org.apache.avro.reflect.Stringable;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;

View File

@@ -23,7 +23,7 @@
 import java.util.LinkedList;
 import java.util.List;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;

View File

@@ -18,7 +18,7 @@
 package org.apache.hadoop.io;
 import com.google.common.collect.ComparisonChain;
-import org.apache.commons.lang.builder.HashCodeBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
 import java.nio.ByteBuffer;
 import java.util.Map;

View File

@@ -22,8 +22,8 @@
 import java.util.HashMap;
 import java.util.Map;
-import org.apache.commons.lang.builder.EqualsBuilder;
-import org.apache.commons.lang.builder.HashCodeBuilder;
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;

View File

@@ -21,7 +21,7 @@
 import java.io.IOException;
 import java.io.FileDescriptor;
-import org.apache.commons.lang.SystemUtils;
+import org.apache.commons.lang3.SystemUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.slf4j.Logger;

View File

@@ -17,8 +17,8 @@
 */
 package org.apache.hadoop.ipc;
-import org.apache.commons.lang.builder.EqualsBuilder;
-import org.apache.commons.lang.builder.HashCodeBuilder;
+import org.apache.commons.lang3.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.HashCodeBuilder;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;

View File

@@ -39,7 +39,7 @@
 import com.fasterxml.jackson.databind.ObjectWriter;
 import com.google.common.base.Preconditions;
 import com.google.common.util.concurrent.AtomicDoubleArray;
-import org.apache.commons.lang.exception.ExceptionUtils;
+import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.metrics2.MetricsCollector;
@@ -429,7 +429,7 @@ private void decayCurrentCounts() {
 updateAverageResponseTime(true);
 } catch (Exception ex) {
 LOG.error("decayCurrentCounts exception: " +
-ExceptionUtils.getFullStackTrace(ex));
+ExceptionUtils.getStackTrace(ex));
 throw ex;
 }
 }
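
lang3 removed getFullStackTrace; getStackTrace is the equivalent here, since it renders the same output as printStackTrace, cause chain included. A minimal sketch of the replacement call (illustrative, not part of the commit):

import org.apache.commons.lang3.exception.ExceptionUtils;

public class StackTraceDemo {
  public static void main(String[] args) {
    Exception ex = new IllegalStateException(
        "decay failed", new RuntimeException("root cause"));
    // Prints the full trace, including the "Caused by" chain.
    System.out.println(ExceptionUtils.getStackTrace(ex));
  }
}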

View File

@@ -32,7 +32,7 @@
 import java.util.concurrent.atomic.AtomicLong;
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.lang.NotImplementedException;
+import org.apache.commons.lang3.NotImplementedException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ipc.CallQueueManager.CallQueueOverflowException;
 import org.apache.hadoop.metrics2.util.MBeans;
@@ -286,7 +286,7 @@ public int size() {
 */
 @Override
 public Iterator<E> iterator() {
-throw new NotImplementedException();
+throw new NotImplementedException("Code is not implemented");
 }
 /**
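
The added message is forced by the API: lang3's NotImplementedException has no no-argument constructor, so every former throw new NotImplementedException() gains the literal "Code is not implemented". A minimal sketch with a hypothetical iterator (not part of the commit):

import java.util.Iterator;
import org.apache.commons.lang3.NotImplementedException;

public class UnsupportedIteratorDemo<E> implements Iterable<E> {
  @Override
  public Iterator<E> iterator() {
    // lang3 requires a message; the 2.x no-arg constructor is gone.
    throw new NotImplementedException("Code is not implemented");
  }
}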

View File

@@ -18,7 +18,7 @@
 package org.apache.hadoop.metrics2;
-import org.apache.commons.lang.exception.ExceptionUtils;
+import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.codehaus.jackson.map.ObjectMapper;

View File

@@ -21,7 +21,7 @@
 import java.lang.reflect.Method;
 import static com.google.common.base.Preconditions.*;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.metrics2.MetricsException;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;

View File

@@ -21,7 +21,7 @@
 import java.lang.reflect.Field;
 import java.lang.reflect.Method;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics2.MetricsException;

View File

@@ -26,7 +26,7 @@
 import java.util.concurrent.ScheduledFuture;
 import java.util.concurrent.TimeUnit;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics2.MetricsInfo;

View File

@@ -32,7 +32,7 @@
 import java.util.function.Function;
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics2.MetricsInfo;

View File

@@ -18,7 +18,7 @@
 package org.apache.hadoop.metrics2.lib;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics2.MetricsInfo;

View File

@@ -37,7 +37,7 @@
 import java.util.regex.Pattern;
 import org.apache.commons.configuration2.SubsetConfiguration;
-import org.apache.commons.lang.time.FastDateFormat;
+import org.apache.commons.lang3.time.FastDateFormat;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;

View File

@@ -28,7 +28,7 @@
 import java.util.List;
 import java.util.Map;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;

View File

@@ -28,7 +28,7 @@
 import java.nio.channels.ReadableByteChannel;
 import java.nio.ByteBuffer;
-import org.apache.commons.lang.SystemUtils;
+import org.apache.commons.lang3.SystemUtils;
 import org.apache.hadoop.util.NativeCodeLoader;
 import org.apache.hadoop.util.CloseableReferenceCount;

View File

@@ -32,7 +32,7 @@
 import java.util.concurrent.locks.Condition;
 import java.util.concurrent.locks.ReentrantLock;
-import org.apache.commons.lang.SystemUtils;
+import org.apache.commons.lang3.SystemUtils;
 import org.apache.hadoop.util.NativeCodeLoader;
 import com.google.common.annotations.VisibleForTesting;

View File

@@ -25,7 +25,7 @@
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Joiner;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;

View File

@@ -27,7 +27,7 @@
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.tools.CommandShell;

View File

@@ -34,7 +34,7 @@
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import com.google.common.annotations.VisibleForTesting;
 import java.util.stream.Collectors;

View File

@@ -26,7 +26,7 @@
 import java.util.Date;
 import java.util.ServiceLoader;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;

View File

@@ -20,8 +20,7 @@
 import java.util.ArrayList;
 import java.util.LinkedList;
-import org.apache.commons.lang.StringUtils;
-import org.apache.commons.lang.WordUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 /**
@@ -103,7 +102,8 @@ String[] getRow(int idx) {
 // Line-wrap if it's too long
 String[] lines = new String[] {raw};
 if (wrap) {
-lines = WordUtils.wrap(lines[0], wrapWidth, "\n", true).split("\n");
+lines = org.apache.hadoop.util.StringUtils.wrap(lines[0], wrapWidth,
+"\n", true).split("\n");
 }
 for (int i=0; i<lines.length; i++) {
 if (justification == Justification.LEFT) {

View File

@@ -35,7 +35,7 @@
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
-import org.apache.commons.lang.SystemUtils;
+import org.apache.commons.lang3.SystemUtils;
 import org.apache.commons.lang3.time.FastDateFormat;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -987,7 +987,7 @@ public static String camelize(String s) {
 String[] words = split(StringUtils.toLowerCase(s), ESCAPE_CHAR, '_');
 for (String word : words)
-sb.append(org.apache.commons.lang.StringUtils.capitalize(word));
+sb.append(org.apache.commons.lang3.StringUtils.capitalize(word));
 return sb.toString();
 }
@@ -1183,4 +1183,64 @@ public static boolean isAlpha(String str) {
 return true;
 }
+/**
+ * Same as WordUtils#wrap in commons-lang 2.6. Unlike commons-lang3, leading
+ * spaces on the first line are NOT stripped.
+ *
+ * @param str the String to be word wrapped, may be null
+ * @param wrapLength the column to wrap the words at, less than 1 is treated
+ *                   as 1
+ * @param newLineStr the string to insert for a new line,
+ *                   <code>null</code> uses the system property line separator
+ * @param wrapLongWords true if long words (such as URLs) should be wrapped
+ * @return a line with newlines inserted, <code>null</code> if null input
+ */
+public static String wrap(String str, int wrapLength, String newLineStr,
+    boolean wrapLongWords) {
+  if(str == null) {
+    return null;
+  } else {
+    if(newLineStr == null) {
+      newLineStr = System.lineSeparator();
+    }
+    if(wrapLength < 1) {
+      wrapLength = 1;
+    }
+    int inputLineLength = str.length();
+    int offset = 0;
+    StringBuffer wrappedLine = new StringBuffer(inputLineLength + 32);
+    while(inputLineLength - offset > wrapLength) {
+      if(str.charAt(offset) == 32) {
+        ++offset;
+      } else {
+        int spaceToWrapAt = str.lastIndexOf(32, wrapLength + offset);
+        if(spaceToWrapAt >= offset) {
+          wrappedLine.append(str.substring(offset, spaceToWrapAt));
+          wrappedLine.append(newLineStr);
+          offset = spaceToWrapAt + 1;
+        } else if(wrapLongWords) {
+          wrappedLine.append(str.substring(offset, wrapLength + offset));
+          wrappedLine.append(newLineStr);
+          offset += wrapLength;
+        } else {
+          spaceToWrapAt = str.indexOf(32, wrapLength + offset);
+          if(spaceToWrapAt >= 0) {
+            wrappedLine.append(str.substring(offset, spaceToWrapAt));
+            wrappedLine.append(newLineStr);
+            offset = spaceToWrapAt + 1;
+          } else {
+            wrappedLine.append(str.substring(offset));
+            offset = inputLineLength;
+          }
+        }
+      }
+    }
+    wrappedLine.append(str.substring(offset));
+    return wrappedLine.toString();
+  }
+}
 }
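
Per the javadoc above, this helper reproduces WordUtils#wrap from commons-lang 2.6 because the lang3 version strips leading spaces on the first line, which would subtly change CLI help output. A hypothetical usage sketch of the new method (not part of the commit):

public class WrapDemo {
  public static void main(String[] args) {
    // Wrap at column 20, inserting "\n" and breaking long words if needed.
    String wrapped = org.apache.hadoop.util.StringUtils.wrap(
        "a very long description that should be word wrapped", 20, "\n", true);
    System.out.println(wrapped);
    // a very long
    // description that
    // should be word
    // wrapped
  }
}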

View File

@@ -62,7 +62,7 @@
 import static org.hamcrest.core.Is.is;
 import static org.junit.Assert.*;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration.IntegerRanges;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;

View File

@@ -18,7 +18,7 @@
 package org.apache.hadoop.conf;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.junit.Before;
 import org.junit.Ignore;
 import org.junit.Test;

View File

@@ -20,7 +20,7 @@
 import java.io.IOException;
 import java.util.Arrays;
-import org.apache.commons.lang.SystemUtils;
+import org.apache.commons.lang3.SystemUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.util.Shell.ShellCommandExecutor;
 import org.junit.Assume;

View File

@@ -19,7 +19,7 @@
 import java.io.IOException;
-import org.apache.commons.lang.RandomStringUtils;
+import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.hadoop.fs.Options.CreateOpts;
 import org.apache.hadoop.test.GenericTestUtils;

View File

@@ -22,7 +22,6 @@
 import java.io.FileNotFoundException;
 import java.util.EnumSet;
-import org.apache.commons.lang.RandomStringUtils;
 import org.apache.hadoop.fs.Options.CreateOpts;
 import org.apache.hadoop.fs.Options.CreateOpts.BlockSize;
 import org.apache.hadoop.io.IOUtils;

View File

@@ -17,7 +17,7 @@
 */
 package org.apache.hadoop.fs;
-import org.apache.commons.lang.RandomStringUtils;
+import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.After;
 import org.junit.Before;

View File

@@ -18,7 +18,7 @@
 package org.apache.hadoop.fs;
-import org.apache.commons.lang.math.RandomUtils;
+import org.apache.commons.lang3.RandomUtils;
 import org.apache.hadoop.fs.StorageStatistics.LongStatistic;
 import org.junit.Before;
@@ -67,15 +67,15 @@ public class TestFileSystemStorageStatistics {
 @Before
 public void setup() {
-statistics.incrementBytesRead(RandomUtils.nextInt(100));
-statistics.incrementBytesWritten(RandomUtils.nextInt(100));
-statistics.incrementLargeReadOps(RandomUtils.nextInt(100));
-statistics.incrementWriteOps(RandomUtils.nextInt(100));
+statistics.incrementBytesRead(RandomUtils.nextInt(0, 100));
+statistics.incrementBytesWritten(RandomUtils.nextInt(0, 100));
+statistics.incrementLargeReadOps(RandomUtils.nextInt(0, 100));
+statistics.incrementWriteOps(RandomUtils.nextInt(0, 100));
-statistics.incrementBytesReadByDistance(0, RandomUtils.nextInt(100));
-statistics.incrementBytesReadByDistance(1, RandomUtils.nextInt(100));
-statistics.incrementBytesReadByDistance(3, RandomUtils.nextInt(100));
-statistics.incrementBytesReadErasureCoded(RandomUtils.nextInt(100));
+statistics.incrementBytesReadByDistance(0, RandomUtils.nextInt(0, 100));
+statistics.incrementBytesReadByDistance(1, RandomUtils.nextInt(0, 100));
+statistics.incrementBytesReadByDistance(3, RandomUtils.nextInt(0, 100));
+statistics.incrementBytesReadErasureCoded(RandomUtils.nextInt(0, 100));
 }
 @Test
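
The extra argument in these hunks reflects a signature change: lang 2.x RandomUtils.nextInt(n) drew from 0 (inclusive) to n (exclusive), while lang3's nearest equivalent is the explicit range nextInt(startInclusive, endExclusive), so nextInt(100) becomes nextInt(0, 100) with identical semantics. A minimal sketch (not part of the commit):

import org.apache.commons.lang3.RandomUtils;

public class RandomRangeDemo {
  public static void main(String[] args) {
    // lang 2.x: RandomUtils.nextInt(100)    -> 0..99
    // lang3:    RandomUtils.nextInt(0, 100) -> 0..99
    System.out.println(RandomUtils.nextInt(0, 100));
  }
}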

View File

@@ -17,8 +17,8 @@
 */
 package org.apache.hadoop.fs.shell;
-import org.apache.commons.lang.RandomStringUtils;
-import org.apache.commons.lang.math.RandomUtils;
+import org.apache.commons.lang3.RandomStringUtils;
+import org.apache.commons.lang3.RandomUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileSystem;
@@ -56,11 +56,11 @@ public static int initialize(Path dir) throws Exception {
 fs.mkdirs(toDirPath);
 int numTotalFiles = 0;
-int numDirs = RandomUtils.nextInt(5);
+int numDirs = RandomUtils.nextInt(0, 5);
 for (int dirCount = 0; dirCount < numDirs; ++dirCount) {
 Path subDirPath = new Path(fromDirPath, "subdir" + dirCount);
 fs.mkdirs(subDirPath);
-int numFiles = RandomUtils.nextInt(10);
+int numFiles = RandomUtils.nextInt(0, 10);
 for (int fileCount = 0; fileCount < numFiles; ++fileCount) {
 numTotalFiles++;
 Path subFile = new Path(subDirPath, "file" + fileCount);
@@ -115,7 +115,7 @@ public void testCopyFromLocalWithThreads() throws Exception {
 Path dir = new Path("dir" + RandomStringUtils.randomNumeric(4));
 int numFiles = TestCopyFromLocal.initialize(dir);
 int maxThreads = Runtime.getRuntime().availableProcessors() * 2;
-int randThreads = RandomUtils.nextInt(maxThreads - 1) + 1;
+int randThreads = RandomUtils.nextInt(0, maxThreads - 1) + 1;
 String numThreads = Integer.toString(randThreads);
 run(new TestMultiThreadedCopy(randThreads,
 randThreads == 1 ? 0 : numFiles), "-t", numThreads,

View File

@@ -26,7 +26,7 @@
 import org.junit.Assume;
 import org.junit.Before;
 import org.junit.Test;
-import org.apache.commons.lang.SystemUtils;
+import org.apache.commons.lang3.SystemUtils;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.test.GenericTestUtils;

View File

@@ -20,7 +20,7 @@
 import com.google.protobuf.BlockingService;
 import com.google.protobuf.RpcController;
 import com.google.protobuf.ServiceException;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.ipc.metrics.RpcMetrics;

View File

@@ -19,7 +19,7 @@
 package org.apache.hadoop.ipc;
 import com.google.protobuf.ServiceException;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;

View File

@@ -38,7 +38,7 @@
 import java.util.List;
 import java.util.concurrent.TimeUnit;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.security.KerberosAuthException;

View File

@@ -43,7 +43,7 @@
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
-import org.apache.commons.lang.exception.ExceptionUtils;
+import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.net.unix.DomainSocket.DomainChannel;
 import org.apache.hadoop.test.GenericTestUtils;

View File

@@ -21,7 +21,7 @@
 import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.commons.lang.mutable.MutableBoolean;
+import org.apache.commons.lang3.mutable.MutableBoolean;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;

View File

@@ -18,7 +18,7 @@
 package org.apache.hadoop.service.launcher;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.service.Service;
 import org.apache.hadoop.service.ServiceOperations;

View File

@@ -40,7 +40,7 @@
 import java.util.regex.Pattern;
 import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang.RandomStringUtils;
+import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.fs.FileUtil;
@@ -344,7 +344,7 @@ public static void assertExceptionContains(String expectedText,
 throw new AssertionError(E_NULL_THROWABLE_STRING, t);
 }
 if (expectedText != null && !msg.contains(expectedText)) {
-String prefix = org.apache.commons.lang.StringUtils.isEmpty(message)
+String prefix = org.apache.commons.lang3.StringUtils.isEmpty(message)
 ? "" : (message + ": ");
 throw new AssertionError(
 String.format("%s Expected to find '%s' %s: %s",

View File

@@ -17,7 +17,7 @@
 */
 package org.apache.hadoop.util;
-import org.apache.commons.lang.exception.ExceptionUtils;
+import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.slf4j.LoggerFactory;
 import org.junit.Assert;
 import org.junit.Test;

View File

@@ -18,7 +18,7 @@
 package org.apache.hadoop.util;
-import org.apache.commons.lang.SystemUtils;
+import org.apache.commons.lang3.SystemUtils;
 import org.junit.Assert;
 import org.junit.Assume;
 import org.junit.Test;

View File

@@ -332,7 +332,7 @@ private void addMetric(String operationName, long value, boolean isWrite) {
 private static String getMetricName(String operationName, boolean isWrite) {
 return (isWrite ? WRITE_LOCK_METRIC_PREFIX : READ_LOCK_METRIC_PREFIX) +
-org.apache.commons.lang.StringUtils.capitalize(operationName) +
+org.apache.commons.lang3.StringUtils.capitalize(operationName) +
 LOCK_METRIC_SUFFIX;
 }
 }

View File

@@ -1043,11 +1043,6 @@
 <artifactId>junit</artifactId>
 <version>4.11</version>
 </dependency>
-<dependency>
-<groupId>commons-lang</groupId>
-<artifactId>commons-lang</artifactId>
-<version>2.6</version>
-</dependency>
 <dependency>
 <groupId>commons-collections</groupId>
 <artifactId>commons-collections</artifactId>

View File

@@ -22,7 +22,7 @@
 import com.aliyun.oss.common.auth.CredentialsProvider;
 import com.aliyun.oss.common.auth.DefaultCredentials;
 import com.aliyun.oss.common.auth.InvalidCredentialsException;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import java.io.IOException;

View File

@@ -30,7 +30,7 @@
 import com.google.common.util.concurrent.ListeningExecutorService;
 import com.google.common.util.concurrent.MoreExecutors;
 import org.apache.commons.collections.CollectionUtils;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.CreateFlag;

View File

@@ -45,7 +45,7 @@
 import com.aliyun.oss.model.UploadPartRequest;
 import com.aliyun.oss.model.UploadPartResult;
 import org.apache.commons.collections.CollectionUtils;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.FileStatus;

View File

@@ -23,7 +23,7 @@
 import com.aliyun.oss.common.auth.CredentialsProvider;
 import com.google.common.base.Preconditions;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.LocalDirAllocator;
 import org.apache.hadoop.security.ProviderUtils;

View File

@@ -18,7 +18,7 @@
 package org.apache.hadoop.fs.aliyun.oss;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.junit.internal.AssumptionViolatedException;

View File

@@ -23,7 +23,7 @@
 import com.amazonaws.auth.AWSCredentialsProvider;
 import com.amazonaws.auth.AnonymousAWSCredentials;
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.IOUtils;

View File

@@ -21,7 +21,7 @@
 import com.amazonaws.auth.AWSCredentialsProvider;
 import com.amazonaws.auth.BasicAWSCredentials;
 import com.amazonaws.auth.AWSCredentials;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;

View File

@@ -121,8 +121,8 @@
 import static org.apache.hadoop.fs.s3a.Invoker.*;
 import static org.apache.hadoop.fs.s3a.S3AUtils.*;
 import static org.apache.hadoop.fs.s3a.Statistic.*;
-import static org.apache.commons.lang.StringUtils.isNotBlank;
-import static org.apache.commons.lang.StringUtils.isNotEmpty;
+import static org.apache.commons.lang3.StringUtils.isNotBlank;
+import static org.apache.commons.lang3.StringUtils.isNotEmpty;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

View File

@@ -24,7 +24,7 @@
 import com.amazonaws.services.s3.model.S3ObjectInputStream;
 import com.amazonaws.services.s3.model.SSECustomerKey;
 import com.google.common.base.Preconditions;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.CanSetReadahead;

View File

@@ -35,7 +35,7 @@
 import com.google.common.base.Preconditions;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;

View File

@@ -21,7 +21,7 @@
 import com.amazonaws.auth.AWSCredentials;
 import com.amazonaws.auth.AWSCredentialsProvider;
 import com.amazonaws.auth.BasicAWSCredentials;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;

View File

@@ -21,7 +21,7 @@
 import com.amazonaws.auth.AWSCredentialsProvider;
 import com.amazonaws.auth.BasicSessionCredentials;
 import com.amazonaws.auth.AWSCredentials;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import java.io.IOException;
 import java.net.URI;

View File

@@ -32,7 +32,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;

View File

@@ -33,7 +33,7 @@
 import com.amazonaws.services.s3.model.PartETag;
 import com.google.common.base.Preconditions;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.FileSystem;

View File

@@ -28,7 +28,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.FileSystem;

View File

@@ -28,7 +28,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.s3a.WriteOperationHelper;

View File

@@ -30,7 +30,7 @@
 import com.google.common.collect.Sets;
 import com.google.common.util.concurrent.UncheckedExecutionException;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;

View File

@@ -26,7 +26,7 @@
 import com.amazonaws.services.dynamodbv2.AmazonDynamoDB;
 import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClientBuilder;
 import com.google.common.base.Preconditions;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

View File

@@ -60,7 +60,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;

View File

@@ -23,7 +23,7 @@
 import com.google.common.cache.Cache;
 import com.google.common.cache.CacheBuilder;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;

View File

@@ -39,7 +39,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileStatus;

View File

@@ -18,7 +18,7 @@
 package org.apache.hadoop.fs.s3native;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -31,7 +31,7 @@
 import java.net.URLDecoder;
 import java.util.Objects;
-import static org.apache.commons.lang.StringUtils.equalsIgnoreCase;
+import static org.apache.commons.lang3.StringUtils.equalsIgnoreCase;
 /**
 * Class to aid logging in to S3 endpoints.

View File

@@ -22,8 +22,8 @@
 import com.amazonaws.services.s3.AmazonS3;
 import com.amazonaws.services.s3.S3ClientOptions;
-import org.apache.commons.lang.StringUtils;
-import org.apache.commons.lang.reflect.FieldUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang3.reflect.FieldUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.Path;

View File

@@ -20,7 +20,7 @@
 import static org.apache.hadoop.fs.contract.ContractTestUtils.skip;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 /**

View File

@@ -20,7 +20,7 @@
 import static org.apache.hadoop.fs.contract.ContractTestUtils.skip;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 /**

View File

@@ -18,7 +18,7 @@
 package org.apache.hadoop.fs.s3a;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;

View File

@@ -28,7 +28,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;

View File

@@ -42,7 +42,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;

View File

@@ -25,7 +25,7 @@
 import com.microsoft.azure.datalake.store.oauth2.DeviceCodeTokenProvider;
 import com.microsoft.azure.datalake.store.oauth2.MsiTokenProvider;
-import org.apache.commons.lang.builder.EqualsBuilder;
+import org.apache.commons.lang3.builder.EqualsBuilder;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.adl.common.CustomMockTokenProvider;
 import org.apache.hadoop.fs.adl.oauth2.AzureADTokenProvider;

View File

@@ -41,7 +41,7 @@
 import java.util.Map;
 import java.util.Set;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;

View File

@@ -44,7 +44,7 @@
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.fs.FSExceptionMessages;
 import org.apache.commons.codec.binary.Base64;

View File

@@ -20,7 +20,7 @@
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;

View File

@@ -48,7 +48,7 @@
 import com.fasterxml.jackson.databind.JsonMappingException;
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectReader;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;

View File

@@ -36,7 +36,7 @@
 import org.apache.hadoop.fs.Syncable;
 import org.apache.hadoop.fs.azure.StorageInterface.CloudPageBlobWrapper;
-import org.apache.commons.lang.exception.ExceptionUtils;
+import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;

View File

@@ -23,7 +23,7 @@
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.azure.security.Constants;
 import org.apache.hadoop.io.retry.RetryPolicy;

View File

@@ -18,7 +18,7 @@
 package org.apache.hadoop.fs.azure;
-import org.apache.commons.lang.Validate;
+import org.apache.commons.lang3.Validate;
 import org.apache.hadoop.fs.azure.security.Constants;
 import org.apache.hadoop.fs.azure.security.SpnegoToken;
 import org.apache.hadoop.fs.azure.security.WasbDelegationTokenIdentifier;

View File

@@ -29,7 +29,7 @@
 import java.net.HttpURLConnection;
 import java.util.Arrays;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.azure.AzureNativeFileSystemStore.TestHookOperationContext;

View File

@@ -22,7 +22,7 @@
 import java.net.URI;
 import java.util.Arrays;
-import org.apache.commons.lang.RandomStringUtils;
+import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;

View File

@@ -36,7 +36,7 @@
 import java.util.List;
 import org.apache.commons.codec.DecoderException;
 import org.apache.commons.codec.net.URLCodec;
-import org.apache.commons.lang.NotImplementedException;
+import org.apache.commons.lang3.NotImplementedException;
 import org.apache.http.client.utils.URIBuilder;
 import com.microsoft.azure.storage.AccessCondition;
@@ -339,7 +339,7 @@ public Iterable<ListBlobItem> listBlobs(String prefix,
 @Override
 public StorageUri getStorageUri() {
-throw new NotImplementedException();
+throw new NotImplementedException("Code is not implemented");
 }
 }
@@ -590,20 +590,20 @@ public MockCloudPageBlobWrapper(URI uri, HashMap<String, String> metadata,
 @Override
 public void create(long length, BlobRequestOptions options,
 OperationContext opContext) throws StorageException {
-throw new NotImplementedException();
+throw new NotImplementedException("Code is not implemented");
 }
 @Override
 public void uploadPages(InputStream sourceStream, long offset, long length,
 BlobRequestOptions options, OperationContext opContext)
 throws StorageException, IOException {
-throw new NotImplementedException();
+throw new NotImplementedException("Code is not implemented");
 }
 @Override
 public ArrayList<PageRange> downloadPageRanges(BlobRequestOptions options,
 OperationContext opContext) throws StorageException {
-throw new NotImplementedException();
+throw new NotImplementedException("Code is not implemented");
 }
 @Override
@@ -622,7 +622,7 @@ public void setWriteBlockSizeInBytes(int writeBlockSizeInBytes) {
 @Override
 public StorageUri getStorageUri() {
-throw new NotImplementedException();
+throw new NotImplementedException("Code is not implemented");
 }
 @Override

View File

@@ -19,7 +19,7 @@
 package org.apache.hadoop.fs.azure;
 import com.microsoft.azure.storage.blob.BlockEntry;
-import org.apache.commons.lang.RandomStringUtils;
+import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.Path;

View File

@@ -28,7 +28,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileContext;
 import org.apache.hadoop.fs.FileSystem;

View File

@@ -20,7 +20,7 @@
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

View File

@@ -28,7 +28,7 @@
 import org.apache.commons.cli.HelpFormatter;
 import org.apache.commons.cli.Options;
 import org.apache.commons.cli.ParseException;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;

View File

@@ -22,7 +22,7 @@
 import java.io.IOException;
 import java.util.EnumSet;
-import org.apache.commons.lang.exception.ExceptionUtils;
+import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;

View File

@@ -17,7 +17,7 @@
 */
 package org.apache.hadoop.mapred.gridmix;
-import org.apache.commons.lang.time.FastDateFormat;
+import org.apache.commons.lang3.time.FastDateFormat;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;

View File

@@ -19,7 +19,7 @@
 import java.io.IOException;
-import org.apache.commons.lang.time.FastDateFormat;
+import org.apache.commons.lang3.time.FastDateFormat;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -76,7 +76,7 @@ class ExecutionSummarizer implements StatListener<JobStats> {
 startTime = System.currentTimeMillis();
 // flatten the args string and store it
 commandLineArgs =
-org.apache.commons.lang.StringUtils.join(args, ' ');
+org.apache.commons.lang3.StringUtils.join(args, ' ');
 }
 /**

View File

@@ -17,7 +17,7 @@
 */
 package org.apache.hadoop.mapred.gridmix;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;

View File

@@ -21,7 +21,7 @@
 import java.util.List;
 import java.util.Random;
-import org.apache.commons.lang.RandomStringUtils;
+import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;

View File

@@ -30,7 +30,6 @@
 import java.util.List;
 import java.util.Map;
-import org.apache.commons.lang.CharSet;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.resourceestimator.common.api.RecurrenceId;
 import org.apache.hadoop.resourceestimator.common.api.ResourceSkyline;

View File

@@ -18,7 +18,7 @@
 package org.apache.hadoop.tools.rumen.anonymization;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 /**
 * Utility class to handle commonly performed tasks in a

Some files were not shown because too many files have changed in this diff.