HADOOP-13444. Replace org.apache.commons.io.Charsets with java.nio.charset.StandardCharsets. Contributed by Vincent Poon.

Akira Ajisaka 2016-08-01 17:35:59 +09:00
parent 34ccaa8367
commit 770b5eb2db
37 changed files with 132 additions and 117 deletions
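The change itself is mechanical: every use of the commons-io Charsets.UTF_8 constant is replaced by the JDK's java.nio.charset.StandardCharsets.UTF_8, which has shipped with the standard library since Java 7, so the org.apache.commons.io.Charsets import can be dropped everywhere. A minimal, self-contained sketch of the before/after pattern follows; the class name and sample string are illustrative, not taken from the patch:

    // Illustrative only -- not a file touched by this commit.
    import java.io.ByteArrayOutputStream;
    import java.io.OutputStreamWriter;
    import java.io.Writer;
    import java.nio.charset.StandardCharsets;

    public class CharsetMigrationExample {
      public static void main(String[] args) throws Exception {
        // Before: new OutputStreamWriter(out, org.apache.commons.io.Charsets.UTF_8)
        // After:  the JDK constant, with no extra dependency.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        try (Writer writer = new OutputStreamWriter(out, StandardCharsets.UTF_8)) {
          writer.write("hrpc");
        }
        // Decoding uses the same constant, mirroring the new String(bytes, ...) call sites.
        System.out.println(new String(out.toByteArray(), StandardCharsets.UTF_8));
      }
    }

Both constants are java.nio.charset.Charset instances, so the overloads being called do not change and no checked UnsupportedEncodingException handling is introduced.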

View File

@@ -23,6 +23,7 @@ import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.OutputStreamWriter;
+import java.nio.charset.StandardCharsets;
 import java.security.NoSuchAlgorithmException;
 import java.util.Collections;
 import java.util.Date;
@@ -32,7 +33,6 @@ import java.util.Map;
 import com.google.gson.stream.JsonReader;
 import com.google.gson.stream.JsonWriter;
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -209,7 +209,7 @@ public abstract class KeyProvider {
   protected byte[] serialize() throws IOException {
     ByteArrayOutputStream buffer = new ByteArrayOutputStream();
     JsonWriter writer = new JsonWriter(
-        new OutputStreamWriter(buffer, Charsets.UTF_8));
+        new OutputStreamWriter(buffer, StandardCharsets.UTF_8));
     try {
       writer.beginObject();
       if (cipher != null) {
@@ -252,8 +252,9 @@ public abstract class KeyProvider {
     int versions = 0;
     String description = null;
     Map<String, String> attributes = null;
-    JsonReader reader = new JsonReader(new InputStreamReader
-        (new ByteArrayInputStream(bytes), Charsets.UTF_8));
+    JsonReader reader =
+        new JsonReader(new InputStreamReader(new ByteArrayInputStream(bytes),
+            StandardCharsets.UTF_8));
     try {
       reader.beginObject();
       while (reader.hasNext()) {

View File

@@ -18,7 +18,6 @@
 package org.apache.hadoop.crypto.key.kms;
 
 import org.apache.commons.codec.binary.Base64;
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.key.KeyProvider;
@@ -65,6 +64,7 @@ import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
 import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;
 import java.security.GeneralSecurityException;
 import java.security.NoSuchAlgorithmException;
 import java.security.PrivilegedExceptionAction;
@@ -271,7 +271,7 @@ public class KMSClientProvider extends KeyProvider implements CryptoExtension,
   }
 
   private static void writeJson(Map map, OutputStream os) throws IOException {
-    Writer writer = new OutputStreamWriter(os, Charsets.UTF_8);
+    Writer writer = new OutputStreamWriter(os, StandardCharsets.UTF_8);
     ObjectMapper jsonMapper = new ObjectMapper();
     jsonMapper.writerWithDefaultPrettyPrinter().writeValue(writer, map);
   }

View File

@@ -21,6 +21,7 @@ import java.io.ByteArrayOutputStream;
 import java.io.EOFException;
 import java.io.IOException;
 import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
 import java.util.LinkedList;
 import java.util.zip.GZIPInputStream;
@@ -32,7 +33,6 @@ import org.apache.avro.generic.GenericDatumWriter;
 import org.apache.avro.io.DatumWriter;
 import org.apache.avro.io.EncoderFactory;
 import org.apache.avro.io.JsonEncoder;
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -235,10 +235,10 @@ class Display extends FsCommand {
       if (!r.next(key, val)) {
         return -1;
       }
-      byte[] tmp = key.toString().getBytes(Charsets.UTF_8);
+      byte[] tmp = key.toString().getBytes(StandardCharsets.UTF_8);
       outbuf.write(tmp, 0, tmp.length);
       outbuf.write('\t');
-      tmp = val.toString().getBytes(Charsets.UTF_8);
+      tmp = val.toString().getBytes(StandardCharsets.UTF_8);
       outbuf.write(tmp, 0, tmp.length);
       outbuf.write('\n');
       inbuf.reset(outbuf.getData(), outbuf.getLength());
@@ -301,7 +301,7 @@ class Display extends FsCommand {
       if (!fileReader.hasNext()) {
         // Write a new line after the last Avro record.
         output.write(System.getProperty("line.separator")
-            .getBytes(Charsets.UTF_8));
+            .getBytes(StandardCharsets.UTF_8));
         output.flush();
       }
       pos = 0;

View File

@@ -21,8 +21,8 @@ import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 
 /**
@@ -78,7 +78,7 @@ class StreamPumper {
   protected void pump() throws IOException {
     InputStreamReader inputStreamReader = new InputStreamReader(
-        stream, Charsets.UTF_8);
+        stream, StandardCharsets.UTF_8);
     BufferedReader br = new BufferedReader(inputStreamReader);
     String line = null;
     while ((line = br.readLine()) != null) {

View File

@@ -17,21 +17,25 @@
  */
 package org.apache.hadoop.http;
 
-import org.apache.commons.io.Charsets;
-
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
 
 /**
  * This class is responsible for quoting HTML characters.
  */
 public class HtmlQuoting {
-  private static final byte[] ampBytes = "&amp;".getBytes(Charsets.UTF_8);
-  private static final byte[] aposBytes = "&apos;".getBytes(Charsets.UTF_8);
-  private static final byte[] gtBytes = "&gt;".getBytes(Charsets.UTF_8);
-  private static final byte[] ltBytes = "&lt;".getBytes(Charsets.UTF_8);
-  private static final byte[] quotBytes = "&quot;".getBytes(Charsets.UTF_8);
+  private static final byte[] AMP_BYTES =
+      "&amp;".getBytes(StandardCharsets.UTF_8);
+  private static final byte[] APOS_BYTES =
+      "&apos;".getBytes(StandardCharsets.UTF_8);
+  private static final byte[] GT_BYTES =
+      "&gt;".getBytes(StandardCharsets.UTF_8);
+  private static final byte[] LT_BYTES =
+      "&lt;".getBytes(StandardCharsets.UTF_8);
+  private static final byte[] QUOT_BYTES =
+      "&quot;".getBytes(StandardCharsets.UTF_8);
 
   /**
    * Does the given string need to be quoted?
@@ -65,7 +69,7 @@ public class HtmlQuoting {
     if (str == null) {
       return false;
     }
-    byte[] bytes = str.getBytes(Charsets.UTF_8);
+    byte[] bytes = str.getBytes(StandardCharsets.UTF_8);
     return needsQuoting(bytes, 0 , bytes.length);
   }
@@ -81,11 +85,21 @@ public class HtmlQuoting {
                                      int off, int len) throws IOException {
     for(int i=off; i < off+len; i++) {
       switch (buffer[i]) {
-      case '&': output.write(ampBytes); break;
-      case '<': output.write(ltBytes); break;
-      case '>': output.write(gtBytes); break;
-      case '\'': output.write(aposBytes); break;
-      case '"': output.write(quotBytes); break;
+      case '&':
+        output.write(AMP_BYTES);
+        break;
+      case '<':
+        output.write(LT_BYTES);
+        break;
+      case '>':
+        output.write(GT_BYTES);
+        break;
+      case '\'':
+        output.write(APOS_BYTES);
+        break;
+      case '"':
+        output.write(QUOT_BYTES);
+        break;
       default: output.write(buffer, i, 1);
       }
     }
@@ -100,7 +114,7 @@ public class HtmlQuoting {
     if (item == null) {
       return null;
     }
-    byte[] bytes = item.getBytes(Charsets.UTF_8);
+    byte[] bytes = item.getBytes(StandardCharsets.UTF_8);
     if (needsQuoting(bytes, 0, bytes.length)) {
       ByteArrayOutputStream buffer = new ByteArrayOutputStream();
       try {

View File

@@ -19,11 +19,11 @@
 package org.apache.hadoop.io;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.nio.charset.UnsupportedCharsetException;
 import java.util.ArrayList;
 
 import org.apache.commons.codec.binary.Base64;
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -91,7 +91,7 @@ public class DefaultStringifier<T> implements Stringifier<T> {
     serializer.serialize(obj);
     byte[] buf = new byte[outBuf.getLength()];
     System.arraycopy(outBuf.getData(), 0, buf, 0, buf.length);
-    return new String(Base64.encodeBase64(buf), Charsets.UTF_8);
+    return new String(Base64.encodeBase64(buf), StandardCharsets.UTF_8);
   }
 
   @Override

View File

@@ -19,11 +19,11 @@
 package org.apache.hadoop.io;
 
 import java.io.*;
+import java.nio.charset.StandardCharsets;
 import java.util.*;
 import java.rmi.server.UID;
 import java.security.MessageDigest;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.*;
 import org.apache.hadoop.util.Options;
 import org.apache.hadoop.fs.*;
@@ -853,7 +853,7 @@ public class SequenceFile {
       try {
        MessageDigest digester = MessageDigest.getInstance("MD5");
        long time = Time.now();
-       digester.update((new UID()+"@"+time).getBytes(Charsets.UTF_8));
+       digester.update((new UID()+"@"+time).getBytes(StandardCharsets.UTF_8));
        sync = digester.digest();
      } catch (Exception e) {
        throw new RuntimeException(e);

View File

@@ -22,8 +22,8 @@ import java.io.BufferedInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
@@ -287,7 +287,7 @@ public class BZip2Codec implements Configurable, SplittableCompressionCodec {
       // The compressed bzip2 stream should start with the
       // identifying characters BZ. Caller of CBZip2OutputStream
       // i.e. this class must write these characters.
-      out.write(HEADER.getBytes(Charsets.UTF_8));
+      out.write(HEADER.getBytes(StandardCharsets.UTF_8));
     }
   }
@@ -421,7 +421,7 @@ public class BZip2Codec implements Configurable, SplittableCompressionCodec {
       byte[] headerBytes = new byte[HEADER_LEN];
       int actualRead = bufferedIn.read(headerBytes, 0, HEADER_LEN);
       if (actualRead != -1) {
-        String header = new String(headerBytes, Charsets.UTF_8);
+        String header = new String(headerBytes, StandardCharsets.UTF_8);
         if (header.compareTo(HEADER) != 0) {
           bufferedIn.reset();
         } else {

View File

@@ -18,13 +18,13 @@ package org.apache.hadoop.io.file.tfile;
 
 import java.io.IOException;
 import java.io.PrintStream;
+import java.nio.charset.StandardCharsets;
 import java.util.Collection;
 import java.util.Iterator;
 import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -234,7 +234,7 @@ class TFileDumper {
             out.printf("%X", b);
           }
         } else {
-          out.print(new String(key, 0, sampleLen, Charsets.UTF_8));
+          out.print(new String(key, 0, sampleLen, StandardCharsets.UTF_8));
         }
         if (sampleLen < key.length) {
           out.print("...");

View File

@@ -18,8 +18,8 @@
 package org.apache.hadoop.ipc;
 
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 
 @InterfaceAudience.Private
@@ -54,8 +54,8 @@ public class RpcConstants {
   /**
    * The first four bytes of Hadoop RPC connections
   */
-  public static final ByteBuffer HEADER = ByteBuffer.wrap("hrpc".getBytes
-      (Charsets.UTF_8));
+  public static final ByteBuffer HEADER =
+      ByteBuffer.wrap("hrpc".getBytes(StandardCharsets.UTF_8));
   public static final int HEADER_LEN_AFTER_HRPC_PART = 3; // 3 bytes that follow
 
   // 1 : Introduce ping and server does not throw away RPCs

View File

@@ -47,6 +47,7 @@ import java.nio.channels.Selector;
 import java.nio.channels.ServerSocketChannel;
 import java.nio.channels.SocketChannel;
 import java.nio.channels.WritableByteChannel;
+import java.nio.charset.StandardCharsets;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -69,7 +70,6 @@ import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslException;
 import javax.security.sasl.SaslServer;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -223,7 +223,7 @@ public abstract class Server {
   * and send back a nicer response.
   */
  private static final ByteBuffer HTTP_GET_BYTES = ByteBuffer.wrap(
-      "GET ".getBytes(Charsets.UTF_8));
+      "GET ".getBytes(StandardCharsets.UTF_8));
 
  /**
   * An HTTP response to send back if we detect an HTTP request to our IPC
@@ -1957,7 +1957,7 @@ public abstract class Server {
    private void setupHttpRequestOnIpcPortResponse() throws IOException {
      Call fakeCall = new Call(0, RpcConstants.INVALID_RETRY_COUNT, null, this);
      fakeCall.setResponse(ByteBuffer.wrap(
-          RECEIVED_HTTP_REQ_RESPONSE.getBytes(Charsets.UTF_8)));
+          RECEIVED_HTTP_REQ_RESPONSE.getBytes(StandardCharsets.UTF_8)));
      fakeCall.sendResponse();
    }

View File

@@ -19,7 +19,6 @@
 package org.apache.hadoop.metrics2.sink;
 
 import org.apache.commons.configuration.SubsetConfiguration;
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -35,6 +34,7 @@ import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
 import java.net.Socket;
+import java.nio.charset.StandardCharsets;
 
 /**
  * A metrics sink that writes to a Graphite server
@@ -150,7 +150,8 @@ public class GraphiteSink implements MetricsSink, Closeable {
       try {
         // Open a connection to Graphite server.
         socket = new Socket(serverHost, serverPort);
-        writer = new OutputStreamWriter(socket.getOutputStream(), Charsets.UTF_8);
+        writer = new OutputStreamWriter(socket.getOutputStream(),
+            StandardCharsets.UTF_8);
       } catch (Exception e) {
         connectionFailures++;
         if (tooManyConnectionFailures()) {

View File

@@ -20,12 +20,12 @@ package org.apache.hadoop.metrics2.sink.ganglia;
 
 import java.io.IOException;
 import java.net.*;
+import java.nio.charset.StandardCharsets;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
 import org.apache.commons.configuration.SubsetConfiguration;
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.metrics2.MetricsSink;
@@ -235,7 +235,7 @@ public abstract class AbstractGangliaSink implements MetricsSink {
   * @param s the string to be written to buffer at offset location
   */
  protected void xdr_string(String s) {
-    byte[] bytes = s.getBytes(Charsets.UTF_8);
+    byte[] bytes = s.getBytes(StandardCharsets.UTF_8);
    int len = bytes.length;
    xdr_int(len);
    System.arraycopy(bytes, 0, buffer, offset, len);

View File

@@ -21,15 +21,13 @@ import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.NET_TOPOLOGY_TA
 
 import java.io.BufferedReader;
 import java.io.FileInputStream;
-import java.io.FileReader;
-import java.io.IOException;
 import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -102,7 +100,7 @@ public class TableMapping extends CachedDNSToSwitchMapping {
      try (BufferedReader reader =
               new BufferedReader(new InputStreamReader(
-                  new FileInputStream(filename), Charsets.UTF_8))) {
+                  new FileInputStream(filename), StandardCharsets.UTF_8))) {
        String line = reader.readLine();
        while (line != null) {
          line = line.trim();

View File

@@ -29,13 +29,13 @@ import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileOutputStream;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -242,7 +242,7 @@ public class Credentials implements Writable {
   }
 
   private static final byte[] TOKEN_STORAGE_MAGIC =
-      "HDTS".getBytes(Charsets.UTF_8);
+      "HDTS".getBytes(StandardCharsets.UTF_8);
   private static final byte TOKEN_STORAGE_VERSION = 1;
 
   /**

View File

@@ -21,6 +21,7 @@ import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.Reader;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Hashtable;
@@ -40,7 +41,6 @@ import javax.naming.directory.SearchResult;
 import javax.naming.ldap.LdapName;
 import javax.naming.ldap.Rdn;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -642,7 +642,7 @@ public class LdapGroupsMapping
    StringBuilder password = new StringBuilder();
    try (Reader reader = new InputStreamReader(
-        new FileInputStream(pwFile), Charsets.UTF_8)) {
+        new FileInputStream(pwFile), StandardCharsets.UTF_8)) {
      int c = reader.read();
      while (c > -1) {
        password.append((char)c);

View File

@@ -23,6 +23,7 @@ import java.io.DataInput;
 import java.io.DataInputStream;
 import java.io.DataOutput;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.security.PrivilegedExceptionAction;
 import java.security.Security;
 import java.util.ArrayList;
@@ -44,7 +45,6 @@ import javax.security.sasl.SaslServer;
 import javax.security.sasl.SaslServerFactory;
 
 import org.apache.commons.codec.binary.Base64;
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -185,11 +185,11 @@ public class SaslRpcServer {
   }
 
   static String encodeIdentifier(byte[] identifier) {
-    return new String(Base64.encodeBase64(identifier), Charsets.UTF_8);
+    return new String(Base64.encodeBase64(identifier), StandardCharsets.UTF_8);
   }
 
   static byte[] decodeIdentifier(String identifier) {
-    return Base64.decodeBase64(identifier.getBytes(Charsets.UTF_8));
+    return Base64.decodeBase64(identifier.getBytes(StandardCharsets.UTF_8));
   }
 
   public static <T extends TokenIdentifier> T getIdentifier(String id,
@@ -208,7 +208,7 @@ public class SaslRpcServer {
   static char[] encodePassword(byte[] password) {
     return new String(Base64.encodeBase64(password),
-        Charsets.UTF_8).toCharArray();
+        StandardCharsets.UTF_8).toCharArray();
   }
 
   /** Splitting fully qualified Kerberos name into parts */

View File

@@ -23,12 +23,12 @@ import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -583,7 +583,7 @@ public class ShellBasedIdMapping implements IdMappingServiceProvider {
    Map<Integer, Integer> gidMapping = new HashMap<Integer, Integer>();
 
    BufferedReader in = new BufferedReader(new InputStreamReader(
-        new FileInputStream(staticMapFile), Charsets.UTF_8));
+        new FileInputStream(staticMapFile), StandardCharsets.UTF_8));
 
    try {
      String line = null;

View File

@@ -20,10 +20,10 @@ package org.apache.hadoop.security.alias;
 
 import java.io.IOException;
 import java.net.URI;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
@@ -58,7 +58,7 @@ public class UserProvider extends CredentialProvider {
      return null;
    }
    return new CredentialEntry(
-        alias, new String(bytes, Charsets.UTF_8).toCharArray());
+        alias, new String(bytes, StandardCharsets.UTF_8).toCharArray());
  }
 
  @Override

View File

@@ -21,11 +21,9 @@ import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.PrintStream;
 import java.net.InetSocketAddress;
-import java.util.Arrays;
 import java.util.LinkedList;
 import java.util.List;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;

View File

@@ -23,12 +23,12 @@ import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.Reader;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashSet;
 import java.util.List;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -89,7 +89,7 @@ public class FileBasedIPList implements IPList {
    if (file.exists()) {
      try (
          Reader fileReader = new InputStreamReader(
-              new FileInputStream(file), Charsets.UTF_8);
+              new FileInputStream(file), StandardCharsets.UTF_8);
          BufferedReader bufferedReader = new BufferedReader(fileReader)) {
        List<String> lines = new ArrayList<String>();
        String line = null;

View File

@@ -19,13 +19,13 @@
 package org.apache.hadoop.util;
 
 import java.io.*;
+import java.nio.charset.StandardCharsets;
 import java.util.Set;
 import java.util.HashSet;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.LogFactory;
 import org.apache.commons.logging.Log;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -85,7 +85,7 @@ public class HostsFileReader {
    BufferedReader reader = null;
    try {
      reader = new BufferedReader(
-          new InputStreamReader(fileInputStream, Charsets.UTF_8));
+          new InputStreamReader(fileInputStream, StandardCharsets.UTF_8));
      String line;
      while ((line = reader.readLine()) != null) {
        String[] nodes = line.split("[ \t\n\f\r]+");

View File

@@ -25,12 +25,12 @@ import java.io.IOException;
 import java.net.DatagramPacket;
 import java.net.DatagramSocket;
 import java.net.SocketException;
+import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.metrics2.AbstractMetric;
@@ -148,7 +148,7 @@ public class TestGangliaMetrics {
  private void checkMetrics(List<byte[]> bytearrlist, int expectedCount) {
    boolean[] foundMetrics = new boolean[expectedMetrics.length];
    for (byte[] bytes : bytearrlist) {
-      String binaryStr = new String(bytes, Charsets.UTF_8);
+      String binaryStr = new String(bytes, StandardCharsets.UTF_8);
      for (int index = 0; index < expectedMetrics.length; index++) {
        if (binaryStr.indexOf(expectedMetrics[index]) >= 0) {
          foundMetrics[index] = true;

View File

@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.minikdc;
 
-import org.apache.commons.io.Charsets;
 import org.apache.kerby.kerberos.kerb.KrbException;
 import org.apache.kerby.kerberos.kerb.server.KdcConfigKey;
 import org.apache.kerby.kerberos.kerb.server.SimpleKdcServer;
@@ -31,6 +30,7 @@ import java.io.FileInputStream;
 import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.HashSet;
 import java.util.Locale;
@@ -95,7 +95,8 @@ public class MiniKdc {
    Properties userConf = new Properties();
    InputStreamReader r = null;
    try {
-      r = new InputStreamReader(new FileInputStream(file), Charsets.UTF_8);
+      r = new InputStreamReader(new FileInputStream(file),
+          StandardCharsets.UTF_8);
      userConf.load(r);
    } finally {
      if (r != null) {

View File

@@ -17,9 +17,9 @@
  */
 package org.apache.hadoop.mount;
 
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.NfsExports;
 import org.apache.hadoop.oncrpc.RpcAcceptedReply;
 import org.apache.hadoop.oncrpc.XDR;
@@ -77,7 +77,8 @@ public class MountResponse {
    if (hostGroups.length > 0) {
      for (int j = 0; j < hostGroups.length; j++) {
        xdr.writeBoolean(true); // Value follows - yes
-        xdr.writeVariableOpaque(hostGroups[j].getBytes(Charsets.UTF_8));
+        xdr.writeVariableOpaque(
+            hostGroups[j].getBytes(StandardCharsets.UTF_8));
      }
    }
    xdr.writeBoolean(false); // Value follows - no more group

View File

@@ -18,11 +18,11 @@
 package org.apache.hadoop.nfs.nfs3;
 
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 import java.util.Arrays;
 
-import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.oncrpc.XDR;
@@ -73,7 +73,7 @@ public class FileHandle {
      return;
    }
 
-    byte[] in = s.getBytes(Charsets.UTF_8);
+    byte[] in = s.getBytes(StandardCharsets.UTF_8);
    digest.update(in);
    byte[] digestbytes = digest.digest();

View File

@@ -18,8 +18,8 @@
 package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.nfs.nfs3.Nfs3Constant;
 import org.apache.hadoop.oncrpc.XDR;
@@ -79,9 +79,9 @@ public class CREATE3Request extends RequestWithHandle {
  public void serialize(XDR xdr) {
    handle.serialize(xdr);
    xdr.writeInt(name.length());
-    xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8), name.length());
+    xdr.writeFixedOpaque(name.getBytes(StandardCharsets.UTF_8), name.length());
    xdr.writeInt(mode);
    objAttr.serialize(xdr);
  }
}
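Worth noting in passing: CREATE3Request (above) writes name.length() — the UTF-16 char count — as the opaque length, while several of the sibling requests below (LOOKUP3, MKDIR3, REMOVE3, RENAME3, RMDIR3, SYMLINK3) write name.getBytes(StandardCharsets.UTF_8).length, the encoded byte count. The two only agree for ASCII names; this is a pre-existing inconsistency that the charset change leaves as-is. A standalone illustration (not part of the patch; class name and strings are made up):

    // Illustrative only -- shows why char count and UTF-8 byte count can differ.
    import java.nio.charset.StandardCharsets;

    public class LengthDemo {
      public static void main(String[] args) {
        String ascii = "file.txt";
        String accented = "r\u00e9sum\u00e9.txt";
        // ASCII: 8 chars and 8 UTF-8 bytes.
        System.out.println(ascii.length() + " chars, "
            + ascii.getBytes(StandardCharsets.UTF_8).length + " bytes");
        // Non-ASCII: 10 chars but 12 UTF-8 bytes, since each accented char encodes to two bytes.
        System.out.println(accented.length() + " chars, "
            + accented.getBytes(StandardCharsets.UTF_8).length + " bytes");
      }
    }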

View File

@@ -18,8 +18,8 @@
 package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
@@ -57,6 +57,7 @@ public class LINK3Request extends RequestWithHandle {
    handle.serialize(xdr);
    fromDirHandle.serialize(xdr);
    xdr.writeInt(fromName.length());
-    xdr.writeFixedOpaque(fromName.getBytes(Charsets.UTF_8), fromName.length());
+    xdr.writeFixedOpaque(fromName.getBytes(StandardCharsets.UTF_8),
+        fromName.length());
  }
}

View File

@@ -18,8 +18,8 @@
 package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
@@ -54,7 +54,7 @@ public class LOOKUP3Request extends RequestWithHandle {
  @VisibleForTesting
  public void serialize(XDR xdr) {
    handle.serialize(xdr);
-    xdr.writeInt(name.getBytes(Charsets.UTF_8).length);
-    xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8));
+    xdr.writeInt(name.getBytes(StandardCharsets.UTF_8).length);
+    xdr.writeFixedOpaque(name.getBytes(StandardCharsets.UTF_8));
  }
}

View File

@@ -18,8 +18,8 @@
 package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
@@ -55,8 +55,8 @@ public class MKDIR3Request extends RequestWithHandle {
  @Override
  public void serialize(XDR xdr) {
    handle.serialize(xdr);
-    xdr.writeInt(name.getBytes(Charsets.UTF_8).length);
-    xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8));
+    xdr.writeInt(name.getBytes(StandardCharsets.UTF_8).length);
+    xdr.writeFixedOpaque(name.getBytes(StandardCharsets.UTF_8));
    objAttr.serialize(xdr);
  }
}

View File

@@ -18,8 +18,8 @@
 package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.NfsFileType;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes.Specdata3;
@@ -80,7 +80,7 @@ public class MKNOD3Request extends RequestWithHandle {
  public void serialize(XDR xdr) {
    handle.serialize(xdr);
    xdr.writeInt(name.length());
-    xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8), name.length());
+    xdr.writeFixedOpaque(name.getBytes(StandardCharsets.UTF_8), name.length());
    objAttr.serialize(xdr);
    if (spec != null) {
      xdr.writeInt(spec.getSpecdata1());

View File

@@ -18,8 +18,8 @@
 package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
@@ -47,7 +47,7 @@ public class REMOVE3Request extends RequestWithHandle {
  @Override
  public void serialize(XDR xdr) {
    handle.serialize(xdr);
-    xdr.writeInt(name.getBytes(Charsets.UTF_8).length);
-    xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8));
+    xdr.writeInt(name.getBytes(StandardCharsets.UTF_8).length);
+    xdr.writeFixedOpaque(name.getBytes(StandardCharsets.UTF_8));
  }
}

View File

@@ -18,8 +18,8 @@
 package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
@@ -67,10 +67,10 @@ public class RENAME3Request extends NFS3Request {
  @Override
  public void serialize(XDR xdr) {
    fromDirHandle.serialize(xdr);
-    xdr.writeInt(fromName.getBytes(Charsets.UTF_8).length);
-    xdr.writeFixedOpaque(fromName.getBytes(Charsets.UTF_8));
+    xdr.writeInt(fromName.getBytes(StandardCharsets.UTF_8).length);
+    xdr.writeFixedOpaque(fromName.getBytes(StandardCharsets.UTF_8));
    toDirHandle.serialize(xdr);
-    xdr.writeInt(toName.getBytes(Charsets.UTF_8).length);
-    xdr.writeFixedOpaque(toName.getBytes(Charsets.UTF_8));
+    xdr.writeInt(toName.getBytes(StandardCharsets.UTF_8).length);
+    xdr.writeFixedOpaque(toName.getBytes(StandardCharsets.UTF_8));
  }
}

View File

@@ -18,8 +18,8 @@
 package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
@@ -47,7 +47,7 @@ public class RMDIR3Request extends RequestWithHandle {
  @Override
  public void serialize(XDR xdr) {
    handle.serialize(xdr);
-    xdr.writeInt(name.getBytes(Charsets.UTF_8).length);
-    xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8));
+    xdr.writeInt(name.getBytes(StandardCharsets.UTF_8).length);
+    xdr.writeFixedOpaque(name.getBytes(StandardCharsets.UTF_8));
  }
}

View File

@@ -18,8 +18,8 @@
 package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
@@ -63,10 +63,10 @@ public class SYMLINK3Request extends RequestWithHandle {
  @Override
  public void serialize(XDR xdr) {
    handle.serialize(xdr);
-    xdr.writeInt(name.getBytes(Charsets.UTF_8).length);
-    xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8));
+    xdr.writeInt(name.getBytes(StandardCharsets.UTF_8).length);
+    xdr.writeFixedOpaque(name.getBytes(StandardCharsets.UTF_8));
    symAttr.serialize(xdr);
-    xdr.writeInt(symData.getBytes(Charsets.UTF_8).length);
-    xdr.writeFixedOpaque(symData.getBytes(Charsets.UTF_8));
+    xdr.writeInt(symData.getBytes(StandardCharsets.UTF_8).length);
+    xdr.writeFixedOpaque(symData.getBytes(StandardCharsets.UTF_8));
  }
}

View File

@@ -18,8 +18,8 @@
 package org.apache.hadoop.oncrpc;
 
 import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
 
-import org.apache.commons.io.Charsets;
 import org.jboss.netty.buffer.ChannelBuffer;
 import org.jboss.netty.buffer.ChannelBuffers;
@@ -166,11 +166,11 @@ public final class XDR {
  }
 
  public String readString() {
-    return new String(readVariableOpaque(), Charsets.UTF_8);
+    return new String(readVariableOpaque(), StandardCharsets.UTF_8);
  }
 
  public void writeString(String s) {
-    writeVariableOpaque(s.getBytes(Charsets.UTF_8));
+    writeVariableOpaque(s.getBytes(StandardCharsets.UTF_8));
  }
 
  private void writePadding() {
@@ -270,4 +270,4 @@
    return b;
  }
}

View File

@@ -19,9 +19,9 @@ package org.apache.hadoop.oncrpc.security;
 
 import java.net.InetAddress;
 import java.net.UnknownHostException;
+import java.nio.charset.StandardCharsets;
 
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.io.Charsets;
 import org.apache.hadoop.oncrpc.XDR;
 
 /** Credential used by AUTH_SYS */
@@ -106,11 +106,11 @@ public class CredentialsSys extends Credentials {
  public void write(XDR xdr) {
    int padding = 0;
    // Ensure there are padding bytes if hostname is not a multiple of 4.
-    padding = 4 - (mHostName.getBytes(Charsets.UTF_8).length % 4);
+    padding = 4 - (mHostName.getBytes(StandardCharsets.UTF_8).length % 4);
    // padding bytes is zero if hostname is already a multiple of 4.
    padding = padding % 4;
    // mStamp + mHostName.length + mHostName + mUID + mGID + mAuxGIDs.count
-    mCredentialsLength = 20 + mHostName.getBytes(Charsets.UTF_8).length;
+    mCredentialsLength = 20 + mHostName.getBytes(StandardCharsets.UTF_8).length;
    mCredentialsLength = mCredentialsLength + padding;
    // mAuxGIDs
    if (mAuxGIDs != null && mAuxGIDs.length > 0) {
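The padding arithmetic in CredentialsSys.write() rounds the hostname field up to the 4-byte XDR boundary, working on the encoded byte length rather than the char count. A small standalone sketch of the same arithmetic (the class, method name, and sample hostnames are illustrative, not from the patch):

    // Illustrative only -- mirrors the padding computation shown above.
    import java.nio.charset.StandardCharsets;

    public class XdrPaddingDemo {
      // Number of zero bytes needed to round a hostname up to a 4-byte XDR boundary.
      static int paddingFor(String hostName) {
        int len = hostName.getBytes(StandardCharsets.UTF_8).length;
        int padding = 4 - (len % 4);
        return padding % 4;   // zero when the length is already a multiple of 4
      }

      public static void main(String[] args) {
        System.out.println(paddingFor("node"));      // 4 bytes -> 0 padding
        System.out.println(paddingFor("node-01"));   // 7 bytes -> 1 padding byte
        System.out.println(paddingFor("node-001"));  // 8 bytes -> 0 padding
      }
    }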