diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java index 9733e45693c..0845a5f6f10 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java @@ -23,6 +23,7 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStreamWriter; +import java.nio.charset.StandardCharsets; import java.security.NoSuchAlgorithmException; import java.util.Collections; import java.util.Date; @@ -32,7 +33,6 @@ import java.util.Map; import com.google.gson.stream.JsonReader; import com.google.gson.stream.JsonWriter; -import org.apache.commons.io.Charsets; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -209,7 +209,7 @@ public abstract class KeyProvider { protected byte[] serialize() throws IOException { ByteArrayOutputStream buffer = new ByteArrayOutputStream(); JsonWriter writer = new JsonWriter( - new OutputStreamWriter(buffer, Charsets.UTF_8)); + new OutputStreamWriter(buffer, StandardCharsets.UTF_8)); try { writer.beginObject(); if (cipher != null) { @@ -252,8 +252,9 @@ public abstract class KeyProvider { int versions = 0; String description = null; Map<String, String> attributes = null; - JsonReader reader = new JsonReader(new InputStreamReader - (new ByteArrayInputStream(bytes), Charsets.UTF_8)); + JsonReader reader = + new JsonReader(new InputStreamReader(new ByteArrayInputStream(bytes), + StandardCharsets.UTF_8)); try { reader.beginObject(); while (reader.hasNext()) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java index 47549f7ee99..701e116f993 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java @@ -18,7 +18,6 @@ package org.apache.hadoop.crypto.key.kms; import org.apache.commons.codec.binary.Base64; -import org.apache.commons.io.Charsets; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.crypto.key.KeyProvider; @@ -65,6 +64,7 @@ import java.net.URI; import java.net.URISyntaxException; import java.net.URL; import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; import java.security.GeneralSecurityException; import java.security.NoSuchAlgorithmException; import java.security.PrivilegedExceptionAction; @@ -271,7 +271,7 @@ public class KMSClientProvider extends KeyProvider implements CryptoExtension, } private static void writeJson(Map map, OutputStream os) throws IOException { - Writer writer = new OutputStreamWriter(os, Charsets.UTF_8); + Writer writer = new OutputStreamWriter(os, StandardCharsets.UTF_8); ObjectMapper jsonMapper = new ObjectMapper(); jsonMapper.writerWithDefaultPrettyPrinter().writeValue(writer, map); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java index f0d7b8de445..e30e45c53b8 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Display.java @@ -21,6 +21,7 @@ import java.io.ByteArrayOutputStream; import java.io.EOFException; import java.io.IOException; import java.io.InputStream; +import java.nio.charset.StandardCharsets; import java.util.LinkedList; import java.util.zip.GZIPInputStream; @@ -32,7 +33,6 @@ import org.apache.avro.generic.GenericDatumWriter; import org.apache.avro.io.DatumWriter; import org.apache.avro.io.EncoderFactory; import org.apache.avro.io.JsonEncoder; -import org.apache.commons.io.Charsets; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -235,10 +235,10 @@ class Display extends FsCommand { if (!r.next(key, val)) { return -1; } - byte[] tmp = key.toString().getBytes(Charsets.UTF_8); + byte[] tmp = key.toString().getBytes(StandardCharsets.UTF_8); outbuf.write(tmp, 0, tmp.length); outbuf.write('\t'); - tmp = val.toString().getBytes(Charsets.UTF_8); + tmp = val.toString().getBytes(StandardCharsets.UTF_8); outbuf.write(tmp, 0, tmp.length); outbuf.write('\n'); inbuf.reset(outbuf.getData(), outbuf.getLength()); @@ -301,7 +301,7 @@ class Display extends FsCommand { if (!fileReader.hasNext()) { // Write a new line after the last Avro record. output.write(System.getProperty("line.separator") - .getBytes(Charsets.UTF_8)); + .getBytes(StandardCharsets.UTF_8)); output.flush(); } pos = 0; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/StreamPumper.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/StreamPumper.java index 00c6401d88d..8018f43def3 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/StreamPumper.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/StreamPumper.java @@ -21,8 +21,8 @@ import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; -import org.apache.commons.io.Charsets; import org.apache.commons.logging.Log; /** @@ -78,7 +78,7 @@ class StreamPumper { protected void pump() throws IOException { InputStreamReader inputStreamReader = new InputStreamReader( - stream, Charsets.UTF_8); + stream, StandardCharsets.UTF_8); BufferedReader br = new BufferedReader(inputStreamReader); String line = null; while ((line = br.readLine()) != null) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HtmlQuoting.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HtmlQuoting.java index 57acebd85f4..51db21c185f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HtmlQuoting.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HtmlQuoting.java @@ -17,21 +17,25 @@ */ package org.apache.hadoop.http; -import org.apache.commons.io.Charsets; - import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.OutputStream; +import java.nio.charset.StandardCharsets; /** * This class is responsible for quoting HTML characters. 
*/ public class HtmlQuoting { - private static final byte[] ampBytes = "&amp;".getBytes(Charsets.UTF_8); - private static final byte[] aposBytes = "&apos;".getBytes(Charsets.UTF_8); - private static final byte[] gtBytes = "&gt;".getBytes(Charsets.UTF_8); - private static final byte[] ltBytes = "&lt;".getBytes(Charsets.UTF_8); - private static final byte[] quotBytes = "&quot;".getBytes(Charsets.UTF_8); + private static final byte[] AMP_BYTES = + "&amp;".getBytes(StandardCharsets.UTF_8); + private static final byte[] APOS_BYTES = + "&apos;".getBytes(StandardCharsets.UTF_8); + private static final byte[] GT_BYTES = + "&gt;".getBytes(StandardCharsets.UTF_8); + private static final byte[] LT_BYTES = + "&lt;".getBytes(StandardCharsets.UTF_8); + private static final byte[] QUOT_BYTES = + "&quot;".getBytes(StandardCharsets.UTF_8); /** * Does the given string need to be quoted? @@ -65,7 +69,7 @@ public class HtmlQuoting { if (str == null) { return false; } - byte[] bytes = str.getBytes(Charsets.UTF_8); + byte[] bytes = str.getBytes(StandardCharsets.UTF_8); return needsQuoting(bytes, 0 , bytes.length); } @@ -81,11 +85,21 @@ public class HtmlQuoting { int off, int len) throws IOException { for(int i=off; i < off+len; i++) { switch (buffer[i]) { - case '&': output.write(ampBytes); break; - case '<': output.write(ltBytes); break; - case '>': output.write(gtBytes); break; - case '\'': output.write(aposBytes); break; - case '"': output.write(quotBytes); break; + case '&': + output.write(AMP_BYTES); + break; + case '<': + output.write(LT_BYTES); + break; + case '>': + output.write(GT_BYTES); + break; + case '\'': + output.write(APOS_BYTES); + break; + case '"': + output.write(QUOT_BYTES); + break; default: output.write(buffer, i, 1); } } @@ -100,7 +114,7 @@ public class HtmlQuoting { if (item == null) { return null; } - byte[] bytes = item.getBytes(Charsets.UTF_8); + byte[] bytes = item.getBytes(StandardCharsets.UTF_8); if (needsQuoting(bytes, 0, bytes.length)) { ByteArrayOutputStream buffer = new ByteArrayOutputStream(); try { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java index 3ba577fc4f4..7453996ecab 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DefaultStringifier.java @@ -19,11 +19,11 @@ package org.apache.hadoop.io; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.nio.charset.UnsupportedCharsetException; import java.util.ArrayList; import org.apache.commons.codec.binary.Base64; -import org.apache.commons.io.Charsets; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; @@ -91,7 +91,7 @@ public class DefaultStringifier<T> implements Stringifier<T> { serializer.serialize(obj); byte[] buf = new byte[outBuf.getLength()]; System.arraycopy(outBuf.getData(), 0, buf, 0, buf.length); - return new String(Base64.encodeBase64(buf), Charsets.UTF_8); + return new String(Base64.encodeBase64(buf), StandardCharsets.UTF_8); } @Override diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java index 0f3f3173ce1..d8bec4872ee 100644 ---
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java @@ -19,11 +19,11 @@ package org.apache.hadoop.io; import java.io.*; +import java.nio.charset.StandardCharsets; import java.util.*; import java.rmi.server.UID; import java.security.MessageDigest; -import org.apache.commons.io.Charsets; import org.apache.commons.logging.*; import org.apache.hadoop.util.Options; import org.apache.hadoop.fs.*; @@ -853,7 +853,7 @@ public class SequenceFile { try { MessageDigest digester = MessageDigest.getInstance("MD5"); long time = Time.now(); - digester.update((new UID()+"@"+time).getBytes(Charsets.UTF_8)); + digester.update((new UID()+"@"+time).getBytes(StandardCharsets.UTF_8)); sync = digester.digest(); } catch (Exception e) { throw new RuntimeException(e); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java index 49dd9c184c5..bf78e0c953d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java @@ -22,8 +22,8 @@ import java.io.BufferedInputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.nio.charset.StandardCharsets; -import org.apache.commons.io.Charsets; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; @@ -287,7 +287,7 @@ public class BZip2Codec implements Configurable, SplittableCompressionCodec { // The compressed bzip2 stream should start with the // identifying characters BZ. Caller of CBZip2OutputStream // i.e. this class must write these characters. 
- out.write(HEADER.getBytes(Charsets.UTF_8)); + out.write(HEADER.getBytes(StandardCharsets.UTF_8)); } } @@ -421,7 +421,7 @@ public class BZip2Codec implements Configurable, SplittableCompressionCodec { byte[] headerBytes = new byte[HEADER_LEN]; int actualRead = bufferedIn.read(headerBytes, 0, HEADER_LEN); if (actualRead != -1) { - String header = new String(headerBytes, Charsets.UTF_8); + String header = new String(headerBytes, StandardCharsets.UTF_8); if (header.compareTo(HEADER) != 0) { bufferedIn.reset(); } else { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java index aabdf57a266..84b92eceff0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFileDumper.java @@ -18,13 +18,13 @@ package org.apache.hadoop.io.file.tfile; import java.io.IOException; import java.io.PrintStream; +import java.nio.charset.StandardCharsets; import java.util.Collection; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Map; import java.util.Set; -import org.apache.commons.io.Charsets; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; @@ -234,7 +234,7 @@ class TFileDumper { out.printf("%X", b); } } else { - out.print(new String(key, 0, sampleLen, Charsets.UTF_8)); + out.print(new String(key, 0, sampleLen, StandardCharsets.UTF_8)); } if (sampleLen < key.length) { out.print("..."); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java index d5e795b92f1..d38474af26b 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcConstants.java @@ -18,8 +18,8 @@ package org.apache.hadoop.ipc; import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; -import org.apache.commons.io.Charsets; import org.apache.hadoop.classification.InterfaceAudience; @InterfaceAudience.Private @@ -54,8 +54,8 @@ public class RpcConstants { /** * The first four bytes of Hadoop RPC connections */ - public static final ByteBuffer HEADER = ByteBuffer.wrap("hrpc".getBytes - (Charsets.UTF_8)); + public static final ByteBuffer HEADER = + ByteBuffer.wrap("hrpc".getBytes(StandardCharsets.UTF_8)); public static final int HEADER_LEN_AFTER_HRPC_PART = 3; // 3 bytes that follow // 1 : Introduce ping and server does not throw away RPCs diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java index caa534c5563..405549af6df 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java @@ -47,6 +47,7 @@ import java.nio.channels.Selector; import java.nio.channels.ServerSocketChannel; import java.nio.channels.SocketChannel; import java.nio.channels.WritableByteChannel; +import java.nio.charset.StandardCharsets; import java.security.PrivilegedExceptionAction; import java.util.ArrayList; import java.util.Arrays; @@ -69,7 +70,6 @@ 
import javax.security.sasl.Sasl; import javax.security.sasl.SaslException; import javax.security.sasl.SaslServer; -import org.apache.commons.io.Charsets; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; @@ -223,7 +223,7 @@ public abstract class Server { * and send back a nicer response. */ private static final ByteBuffer HTTP_GET_BYTES = ByteBuffer.wrap( - "GET ".getBytes(Charsets.UTF_8)); + "GET ".getBytes(StandardCharsets.UTF_8)); /** * An HTTP response to send back if we detect an HTTP request to our IPC @@ -1957,7 +1957,7 @@ public abstract class Server { private void setupHttpRequestOnIpcPortResponse() throws IOException { Call fakeCall = new Call(0, RpcConstants.INVALID_RETRY_COUNT, null, this); fakeCall.setResponse(ByteBuffer.wrap( - RECEIVED_HTTP_REQ_RESPONSE.getBytes(Charsets.UTF_8))); + RECEIVED_HTTP_REQ_RESPONSE.getBytes(StandardCharsets.UTF_8))); fakeCall.sendResponse(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java index e46a654e823..a61fa5b97bb 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/GraphiteSink.java @@ -19,7 +19,6 @@ package org.apache.hadoop.metrics2.sink; import org.apache.commons.configuration.SubsetConfiguration; -import org.apache.commons.io.Charsets; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; @@ -35,6 +34,7 @@ import java.io.IOException; import java.io.OutputStreamWriter; import java.io.Writer; import java.net.Socket; +import java.nio.charset.StandardCharsets; /** * A metrics sink that writes to a Graphite server @@ -150,7 +150,8 @@ public class GraphiteSink implements MetricsSink, Closeable { try { // Open a connection to Graphite server. 
socket = new Socket(serverHost, serverPort); - writer = new OutputStreamWriter(socket.getOutputStream(), Charsets.UTF_8); + writer = new OutputStreamWriter(socket.getOutputStream(), + StandardCharsets.UTF_8); } catch (Exception e) { connectionFailures++; if (tooManyConnectionFailures()) { diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java index c9df0ffcc3b..887cfff1032 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java @@ -20,12 +20,12 @@ package org.apache.hadoop.metrics2.sink.ganglia; import java.io.IOException; import java.net.*; +import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.commons.configuration.SubsetConfiguration; -import org.apache.commons.io.Charsets; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.metrics2.MetricsSink; @@ -235,7 +235,7 @@ public abstract class AbstractGangliaSink implements MetricsSink { * @param s the string to be written to buffer at offset location */ protected void xdr_string(String s) { - byte[] bytes = s.getBytes(Charsets.UTF_8); + byte[] bytes = s.getBytes(StandardCharsets.UTF_8); int len = bytes.length; xdr_int(len); System.arraycopy(bytes, 0, buffer, offset, len); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java index 59c0ca96750..362cf07b43c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/TableMapping.java @@ -21,15 +21,13 @@ import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.NET_TOPOLOGY_TA import java.io.BufferedReader; import java.io.FileInputStream; -import java.io.FileReader; -import java.io.IOException; import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; -import org.apache.commons.io.Charsets; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -102,7 +100,7 @@ public class TableMapping extends CachedDNSToSwitchMapping { try (BufferedReader reader = new BufferedReader(new InputStreamReader( - new FileInputStream(filename), Charsets.UTF_8))) { + new FileInputStream(filename), StandardCharsets.UTF_8))) { String line = reader.readLine(); while (line != null) { line = line.trim(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java index c62a49c07a3..5a8e81f0229 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java @@ -29,13 +29,13 @@ import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import 
java.io.IOException; +import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; -import org.apache.commons.io.Charsets; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; @@ -242,7 +242,7 @@ public class Credentials implements Writable { } private static final byte[] TOKEN_STORAGE_MAGIC = - "HDTS".getBytes(Charsets.UTF_8); + "HDTS".getBytes(StandardCharsets.UTF_8); private static final byte TOKEN_STORAGE_VERSION = 1; /** diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java index 5a0b1d9d4c1..4b941efbefc 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/LdapGroupsMapping.java @@ -21,6 +21,7 @@ import java.io.FileInputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.Reader; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collections; import java.util.Hashtable; @@ -40,7 +41,6 @@ import javax.naming.directory.SearchResult; import javax.naming.ldap.LdapName; import javax.naming.ldap.Rdn; -import org.apache.commons.io.Charsets; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; @@ -642,7 +642,7 @@ public class LdapGroupsMapping StringBuilder password = new StringBuilder(); try (Reader reader = new InputStreamReader( - new FileInputStream(pwFile), Charsets.UTF_8)) { + new FileInputStream(pwFile), StandardCharsets.UTF_8)) { int c = reader.read(); while (c > -1) { password.append((char)c); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java index 50acc5c51bf..377a0f11270 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java @@ -23,6 +23,7 @@ import java.io.DataInput; import java.io.DataInputStream; import java.io.DataOutput; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.security.PrivilegedExceptionAction; import java.security.Security; import java.util.ArrayList; @@ -44,7 +45,6 @@ import javax.security.sasl.SaslServer; import javax.security.sasl.SaslServerFactory; import org.apache.commons.codec.binary.Base64; -import org.apache.commons.io.Charsets; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.classification.InterfaceAudience; @@ -185,11 +185,11 @@ public class SaslRpcServer { } static String encodeIdentifier(byte[] identifier) { - return new String(Base64.encodeBase64(identifier), Charsets.UTF_8); + return new String(Base64.encodeBase64(identifier), StandardCharsets.UTF_8); } static byte[] decodeIdentifier(String identifier) { - return Base64.decodeBase64(identifier.getBytes(Charsets.UTF_8)); + return Base64.decodeBase64(identifier.getBytes(StandardCharsets.UTF_8)); } public static <T extends TokenIdentifier> T getIdentifier(String id, @@ -208,7 +208,7
@@ public class SaslRpcServer { static char[] encodePassword(byte[] password) { return new String(Base64.encodeBase64(password), - Charsets.UTF_8).toCharArray(); + StandardCharsets.UTF_8).toCharArray(); } /** Splitting fully qualified Kerberos name into parts */ diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java index e45ac8cf11d..efc1fd6e85a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedIdMapping.java @@ -23,12 +23,12 @@ import java.io.FileInputStream; import java.io.IOException; import java.io.InputStreamReader; import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.commons.io.Charsets; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; @@ -583,7 +583,7 @@ public class ShellBasedIdMapping implements IdMappingServiceProvider { Map<Integer, Integer> gidMapping = new HashMap<Integer, Integer>(); BufferedReader in = new BufferedReader(new InputStreamReader( - new FileInputStream(staticMapFile), Charsets.UTF_8)); + new FileInputStream(staticMapFile), StandardCharsets.UTF_8)); try { String line = null; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/UserProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/UserProvider.java index 127ccf005d8..0c960d891b5 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/UserProvider.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/UserProvider.java @@ -20,10 +20,10 @@ package org.apache.hadoop.security.alias; import java.io.IOException; import java.net.URI; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; -import org.apache.commons.io.Charsets; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.Text; @@ -58,7 +58,7 @@ public class UserProvider extends CredentialProvider { return null; } return new CredentialEntry( - alias, new String(bytes, Charsets.UTF_8).toCharArray()); + alias, new String(bytes, StandardCharsets.UTF_8).toCharArray()); } @Override diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceAdmin.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceAdmin.java index 4cf1ead23fb..038435c6c37 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceAdmin.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceAdmin.java @@ -21,11 +21,9 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.PrintStream; import java.net.InetSocketAddress; -import java.util.Arrays; import java.util.LinkedList; import java.util.List; -import org.apache.commons.io.Charsets; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; diff --git
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FileBasedIPList.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FileBasedIPList.java index b0c12be731c..6ee1212df35 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FileBasedIPList.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/FileBasedIPList.java @@ -23,12 +23,12 @@ import java.io.FileInputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.Reader; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; -import org.apache.commons.io.Charsets; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; @@ -89,7 +89,7 @@ public class FileBasedIPList implements IPList { if (file.exists()) { try ( Reader fileReader = new InputStreamReader( - new FileInputStream(file), Charsets.UTF_8); + new FileInputStream(file), StandardCharsets.UTF_8); BufferedReader bufferedReader = new BufferedReader(fileReader)) { List<String> lines = new ArrayList<String>(); String line = null; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java index c5d6b869c35..1cba4266afe 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/HostsFileReader.java @@ -19,13 +19,13 @@ package org.apache.hadoop.util; import java.io.*; +import java.nio.charset.StandardCharsets; import java.util.Set; import java.util.HashSet; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock; import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock; -import org.apache.commons.io.Charsets; import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.Log; import org.apache.hadoop.classification.InterfaceAudience; @@ -85,7 +85,7 @@ public class HostsFileReader { BufferedReader reader = null; try { reader = new BufferedReader( - new InputStreamReader(fileInputStream, Charsets.UTF_8)); + new InputStreamReader(fileInputStream, StandardCharsets.UTF_8)); String line; while ((line = reader.readLine()) != null) { String[] nodes = line.split("[ \t\n\f\r]+"); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java index c19d238d481..daf64bd9e49 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/metrics2/impl/TestGangliaMetrics.java @@ -25,12 +25,12 @@ import java.io.IOException; import java.net.DatagramPacket; import java.net.DatagramSocket; import java.net.SocketException; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; -import org.apache.commons.io.Charsets; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.metrics2.AbstractMetric; @@ -148,7 +148,7 @@ public class TestGangliaMetrics { private void checkMetrics(List<byte[]>
bytearrlist, int expectedCount) { boolean[] foundMetrics = new boolean[expectedMetrics.length]; for (byte[] bytes : bytearrlist) { - String binaryStr = new String(bytes, Charsets.UTF_8); + String binaryStr = new String(bytes, StandardCharsets.UTF_8); for (int index = 0; index < expectedMetrics.length; index++) { if (binaryStr.indexOf(expectedMetrics[index]) >= 0) { foundMetrics[index] = true; diff --git a/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/MiniKdc.java b/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/MiniKdc.java index 281b3ccc6e9..fe194c00e45 100644 --- a/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/MiniKdc.java +++ b/hadoop-common-project/hadoop-minikdc/src/main/java/org/apache/hadoop/minikdc/MiniKdc.java @@ -17,7 +17,6 @@ */ package org.apache.hadoop.minikdc; -import org.apache.commons.io.Charsets; import org.apache.kerby.kerberos.kerb.KrbException; import org.apache.kerby.kerberos.kerb.server.KdcConfigKey; import org.apache.kerby.kerberos.kerb.server.SimpleKdcServer; @@ -31,6 +30,7 @@ import java.io.FileInputStream; import java.io.InputStream; import java.io.InputStreamReader; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.HashSet; import java.util.Locale; @@ -95,7 +95,8 @@ public class MiniKdc { Properties userConf = new Properties(); InputStreamReader r = null; try { - r = new InputStreamReader(new FileInputStream(file), Charsets.UTF_8); + r = new InputStreamReader(new FileInputStream(file), + StandardCharsets.UTF_8); userConf.load(r); } finally { if (r != null) { diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountResponse.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountResponse.java index 889d45a444f..a073f87715e 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountResponse.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountResponse.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.mount; +import java.nio.charset.StandardCharsets; import java.util.List; -import org.apache.commons.io.Charsets; import org.apache.hadoop.nfs.NfsExports; import org.apache.hadoop.oncrpc.RpcAcceptedReply; import org.apache.hadoop.oncrpc.XDR; @@ -77,7 +77,8 @@ public class MountResponse { if (hostGroups.length > 0) { for (int j = 0; j < hostGroups.length; j++) { xdr.writeBoolean(true); // Value follows - yes - xdr.writeVariableOpaque(hostGroups[j].getBytes(Charsets.UTF_8)); + xdr.writeVariableOpaque( + hostGroups[j].getBytes(StandardCharsets.UTF_8)); } } xdr.writeBoolean(false); // Value follows - no more group diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/FileHandle.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/FileHandle.java index 415b459f68f..f58c8b322f8 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/FileHandle.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/FileHandle.java @@ -18,11 +18,11 @@ package org.apache.hadoop.nfs.nfs3; import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.Arrays; -import org.apache.commons.io.Charsets; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import 
org.apache.hadoop.oncrpc.XDR; @@ -73,7 +73,7 @@ public class FileHandle { return; } - byte[] in = s.getBytes(Charsets.UTF_8); + byte[] in = s.getBytes(StandardCharsets.UTF_8); digest.update(in); byte[] digestbytes = digest.digest(); diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/CREATE3Request.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/CREATE3Request.java index e75ce15b52a..39c368d939b 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/CREATE3Request.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/CREATE3Request.java @@ -18,8 +18,8 @@ package org.apache.hadoop.nfs.nfs3.request; import java.io.IOException; +import java.nio.charset.StandardCharsets; -import org.apache.commons.io.Charsets; import org.apache.hadoop.nfs.nfs3.FileHandle; import org.apache.hadoop.nfs.nfs3.Nfs3Constant; import org.apache.hadoop.oncrpc.XDR; @@ -79,9 +79,9 @@ public class CREATE3Request extends RequestWithHandle { public void serialize(XDR xdr) { handle.serialize(xdr); xdr.writeInt(name.length()); - xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8), name.length()); + xdr.writeFixedOpaque(name.getBytes(StandardCharsets.UTF_8), name.length()); xdr.writeInt(mode); objAttr.serialize(xdr); } -} \ No newline at end of file +} diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/LINK3Request.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/LINK3Request.java index 1dcb85d3d46..6c7b76a7611 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/LINK3Request.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/LINK3Request.java @@ -18,8 +18,8 @@ package org.apache.hadoop.nfs.nfs3.request; import java.io.IOException; +import java.nio.charset.StandardCharsets; -import org.apache.commons.io.Charsets; import org.apache.hadoop.nfs.nfs3.FileHandle; import org.apache.hadoop.oncrpc.XDR; @@ -57,6 +57,7 @@ public class LINK3Request extends RequestWithHandle { handle.serialize(xdr); fromDirHandle.serialize(xdr); xdr.writeInt(fromName.length()); - xdr.writeFixedOpaque(fromName.getBytes(Charsets.UTF_8), fromName.length()); + xdr.writeFixedOpaque(fromName.getBytes(StandardCharsets.UTF_8), + fromName.length()); } } diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/LOOKUP3Request.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/LOOKUP3Request.java index 0435483e837..4d31a8203f2 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/LOOKUP3Request.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/LOOKUP3Request.java @@ -18,8 +18,8 @@ package org.apache.hadoop.nfs.nfs3.request; import java.io.IOException; +import java.nio.charset.StandardCharsets; -import org.apache.commons.io.Charsets; import org.apache.hadoop.nfs.nfs3.FileHandle; import org.apache.hadoop.oncrpc.XDR; @@ -54,7 +54,7 @@ public class LOOKUP3Request extends RequestWithHandle { @VisibleForTesting public void serialize(XDR xdr) { handle.serialize(xdr); - xdr.writeInt(name.getBytes(Charsets.UTF_8).length); - xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8)); + xdr.writeInt(name.getBytes(StandardCharsets.UTF_8).length); + 
xdr.writeFixedOpaque(name.getBytes(StandardCharsets.UTF_8)); } -} \ No newline at end of file +} diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/MKDIR3Request.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/MKDIR3Request.java index bba26eeb301..93e7c7a772b 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/MKDIR3Request.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/MKDIR3Request.java @@ -18,8 +18,8 @@ package org.apache.hadoop.nfs.nfs3.request; import java.io.IOException; +import java.nio.charset.StandardCharsets; -import org.apache.commons.io.Charsets; import org.apache.hadoop.nfs.nfs3.FileHandle; import org.apache.hadoop.oncrpc.XDR; @@ -55,8 +55,8 @@ public class MKDIR3Request extends RequestWithHandle { @Override public void serialize(XDR xdr) { handle.serialize(xdr); - xdr.writeInt(name.getBytes(Charsets.UTF_8).length); - xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8)); + xdr.writeInt(name.getBytes(StandardCharsets.UTF_8).length); + xdr.writeFixedOpaque(name.getBytes(StandardCharsets.UTF_8)); objAttr.serialize(xdr); } -} \ No newline at end of file +} diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/MKNOD3Request.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/MKNOD3Request.java index 0659dd10c04..aa2ec8c0969 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/MKNOD3Request.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/MKNOD3Request.java @@ -18,8 +18,8 @@ package org.apache.hadoop.nfs.nfs3.request; import java.io.IOException; +import java.nio.charset.StandardCharsets; -import org.apache.commons.io.Charsets; import org.apache.hadoop.nfs.NfsFileType; import org.apache.hadoop.nfs.nfs3.FileHandle; import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes.Specdata3; @@ -80,7 +80,7 @@ public class MKNOD3Request extends RequestWithHandle { public void serialize(XDR xdr) { handle.serialize(xdr); xdr.writeInt(name.length()); - xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8), name.length()); + xdr.writeFixedOpaque(name.getBytes(StandardCharsets.UTF_8), name.length()); objAttr.serialize(xdr); if (spec != null) { xdr.writeInt(spec.getSpecdata1()); diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/REMOVE3Request.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/REMOVE3Request.java index 9ad156d90ea..c4ca6d26ca6 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/REMOVE3Request.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/REMOVE3Request.java @@ -18,8 +18,8 @@ package org.apache.hadoop.nfs.nfs3.request; import java.io.IOException; +import java.nio.charset.StandardCharsets; -import org.apache.commons.io.Charsets; import org.apache.hadoop.nfs.nfs3.FileHandle; import org.apache.hadoop.oncrpc.XDR; @@ -47,7 +47,7 @@ public class REMOVE3Request extends RequestWithHandle { @Override public void serialize(XDR xdr) { handle.serialize(xdr); - xdr.writeInt(name.getBytes(Charsets.UTF_8).length); - xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8)); + xdr.writeInt(name.getBytes(StandardCharsets.UTF_8).length); + xdr.writeFixedOpaque(name.getBytes(StandardCharsets.UTF_8)); } -} \ No newline 
at end of file +} diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/RENAME3Request.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/RENAME3Request.java index c54a8e2da2d..561b79e3ea9 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/RENAME3Request.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/RENAME3Request.java @@ -18,8 +18,8 @@ package org.apache.hadoop.nfs.nfs3.request; import java.io.IOException; +import java.nio.charset.StandardCharsets; -import org.apache.commons.io.Charsets; import org.apache.hadoop.nfs.nfs3.FileHandle; import org.apache.hadoop.oncrpc.XDR; @@ -67,10 +67,10 @@ public class RENAME3Request extends NFS3Request { @Override public void serialize(XDR xdr) { fromDirHandle.serialize(xdr); - xdr.writeInt(fromName.getBytes(Charsets.UTF_8).length); - xdr.writeFixedOpaque(fromName.getBytes(Charsets.UTF_8)); + xdr.writeInt(fromName.getBytes(StandardCharsets.UTF_8).length); + xdr.writeFixedOpaque(fromName.getBytes(StandardCharsets.UTF_8)); toDirHandle.serialize(xdr); - xdr.writeInt(toName.getBytes(Charsets.UTF_8).length); - xdr.writeFixedOpaque(toName.getBytes(Charsets.UTF_8)); + xdr.writeInt(toName.getBytes(StandardCharsets.UTF_8).length); + xdr.writeFixedOpaque(toName.getBytes(StandardCharsets.UTF_8)); } -} \ No newline at end of file +} diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/RMDIR3Request.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/RMDIR3Request.java index 6ae0de89fbf..61462452818 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/RMDIR3Request.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/RMDIR3Request.java @@ -18,8 +18,8 @@ package org.apache.hadoop.nfs.nfs3.request; import java.io.IOException; +import java.nio.charset.StandardCharsets; -import org.apache.commons.io.Charsets; import org.apache.hadoop.nfs.nfs3.FileHandle; import org.apache.hadoop.oncrpc.XDR; @@ -47,7 +47,7 @@ public class RMDIR3Request extends RequestWithHandle { @Override public void serialize(XDR xdr) { handle.serialize(xdr); - xdr.writeInt(name.getBytes(Charsets.UTF_8).length); - xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8)); + xdr.writeInt(name.getBytes(StandardCharsets.UTF_8).length); + xdr.writeFixedOpaque(name.getBytes(StandardCharsets.UTF_8)); } -} \ No newline at end of file +} diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/SYMLINK3Request.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/SYMLINK3Request.java index 59188e2b50e..86d2b9fb429 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/SYMLINK3Request.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/SYMLINK3Request.java @@ -18,8 +18,8 @@ package org.apache.hadoop.nfs.nfs3.request; import java.io.IOException; +import java.nio.charset.StandardCharsets; -import org.apache.commons.io.Charsets; import org.apache.hadoop.nfs.nfs3.FileHandle; import org.apache.hadoop.oncrpc.XDR; @@ -63,10 +63,10 @@ public class SYMLINK3Request extends RequestWithHandle { @Override public void serialize(XDR xdr) { handle.serialize(xdr); - xdr.writeInt(name.getBytes(Charsets.UTF_8).length); - 
xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8)); + xdr.writeInt(name.getBytes(StandardCharsets.UTF_8).length); + xdr.writeFixedOpaque(name.getBytes(StandardCharsets.UTF_8)); symAttr.serialize(xdr); - xdr.writeInt(symData.getBytes(Charsets.UTF_8).length); - xdr.writeFixedOpaque(symData.getBytes(Charsets.UTF_8)); + xdr.writeInt(symData.getBytes(StandardCharsets.UTF_8).length); + xdr.writeFixedOpaque(symData.getBytes(StandardCharsets.UTF_8)); } -} \ No newline at end of file +} diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/XDR.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/XDR.java index 474559ea92f..b1353886925 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/XDR.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/XDR.java @@ -18,8 +18,8 @@ package org.apache.hadoop.oncrpc; import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; -import org.apache.commons.io.Charsets; import org.jboss.netty.buffer.ChannelBuffer; import org.jboss.netty.buffer.ChannelBuffers; @@ -166,11 +166,11 @@ public final class XDR { } public String readString() { - return new String(readVariableOpaque(), Charsets.UTF_8); + return new String(readVariableOpaque(), StandardCharsets.UTF_8); } public void writeString(String s) { - writeVariableOpaque(s.getBytes(Charsets.UTF_8)); + writeVariableOpaque(s.getBytes(StandardCharsets.UTF_8)); } private void writePadding() { @@ -270,4 +270,4 @@ public final class XDR { return b; } -} \ No newline at end of file +} diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/CredentialsSys.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/CredentialsSys.java index 4d39de31703..19ba32022ee 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/CredentialsSys.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/CredentialsSys.java @@ -19,9 +19,9 @@ package org.apache.hadoop.oncrpc.security; import java.net.InetAddress; import java.net.UnknownHostException; +import java.nio.charset.StandardCharsets; import com.google.common.annotations.VisibleForTesting; -import org.apache.commons.io.Charsets; import org.apache.hadoop.oncrpc.XDR; /** Credential used by AUTH_SYS */ @@ -106,11 +106,11 @@ public class CredentialsSys extends Credentials { public void write(XDR xdr) { int padding = 0; // Ensure there are padding bytes if hostname is not a multiple of 4. - padding = 4 - (mHostName.getBytes(Charsets.UTF_8).length % 4); + padding = 4 - (mHostName.getBytes(StandardCharsets.UTF_8).length % 4); // padding bytes is zero if hostname is already a multiple of 4. padding = padding % 4; // mStamp + mHostName.length + mHostName + mUID + mGID + mAuxGIDs.count - mCredentialsLength = 20 + mHostName.getBytes(Charsets.UTF_8).length; + mCredentialsLength = 20 + mHostName.getBytes(StandardCharsets.UTF_8).length; mCredentialsLength = mCredentialsLength + padding; // mAuxGIDs if (mAuxGIDs != null && mAuxGIDs.length > 0) {
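
Note (not part of the patch): the change above is mechanical, swapping the third-party org.apache.commons.io.Charsets.UTF_8 constant for the JDK's java.nio.charset.StandardCharsets.UTF_8, available since Java 7. Both are java.nio.charset.Charset instances, so encoding behavior is unchanged; the JDK constant simply removes a commons-io dependency for charset lookup. Below is a minimal, self-contained sketch of the pattern; the class name and strings are hypothetical, chosen only for illustration.

import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.nio.charset.StandardCharsets;

public class StandardCharsetsMigrationExample {
  public static void main(String[] args) throws Exception {
    // Before (what the patch removes):
    //   byte[] header = "hrpc".getBytes(org.apache.commons.io.Charsets.UTF_8);
    // After (what the patch adds) -- same Charset instance semantics:
    byte[] header = "hrpc".getBytes(StandardCharsets.UTF_8);

    // Passing a Charset object rather than a charset name like "UTF-8"
    // avoids the checked UnsupportedEncodingException and a runtime lookup.
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    try (Writer w = new OutputStreamWriter(out, StandardCharsets.UTF_8)) {
      w.write("key\tvalue\n");
    }
    try (BufferedReader r = new BufferedReader(new InputStreamReader(
        new ByteArrayInputStream(out.toByteArray()), StandardCharsets.UTF_8))) {
      System.out.println(r.readLine() + " / header bytes: " + header.length);
    }
  }
}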