HADOOP-11389. Clean up byte to string encoding issues in hadoop-common. Contributed by Haohui Mai.
parent 23289d13ff
commit 5bc3aae6fb
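
The theme of this change: String#getBytes(), new String(byte[]), FileReader, FileWriter, and the one-argument InputStreamReader/OutputStreamWriter constructors all silently use the JVM's platform default charset, so the bytes they produce or consume vary with the machine's locale settings. The patch pins each conversion to an explicit charset, almost always UTF-8 via org.apache.commons.io.Charsets or com.google.common.base.Charsets (the code base predates reliance on Java 7's StandardCharsets). A minimal illustration of the pitfall, not part of the commit (the class name is invented for the demo):

    import java.nio.charset.Charset;
    import java.nio.charset.StandardCharsets;

    public class DefaultCharsetPitfall {
      public static void main(String[] args) {
        String s = "r\u00e9sum\u00e9";                    // "résumé"
        byte[] platform = s.getBytes();                   // platform default charset
        byte[] utf8 = s.getBytes(StandardCharsets.UTF_8); // explicit, portable
        System.out.println("default charset: " + Charset.defaultCharset());
        System.out.println("platform bytes:  " + platform.length); // varies by locale
        System.out.println("UTF-8 bytes:     " + utf8.length);     // 8 on every JVM
      }
    }

Run once with -Dfile.encoding=ISO-8859-1 and again with -Dfile.encoding=UTF-8 to watch the byte counts diverge.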

CHANGES.txt
@@ -202,6 +202,9 @@ Release 2.7.0 - UNRELEASED
     HADOOP-11211. mapreduce.job.classloader.system.classes semantics should be
     be order-independent. (Yitong Zhou via gera)
 
+    HADOOP-11389. Clean up byte to string encoding issues in hadoop-common.
+    (wheat9)
+
 Release 2.6.0 - 2014-11-18

org/apache/hadoop/conf/Configuration.java
@@ -67,6 +67,7 @@ import javax.xml.transform.TransformerFactory;
 import javax.xml.transform.dom.DOMSource;
 import javax.xml.transform.stream.StreamResult;
 
+import com.google.common.base.Charsets;
 import org.apache.commons.collections.map.UnmodifiableMap;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -2255,7 +2256,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
         LOG.info("found resource " + name + " at " + url);
       }
 
-      return new InputStreamReader(url.openStream());
+      return new InputStreamReader(url.openStream(), Charsets.UTF_8);
     } catch (Exception e) {
       return null;
     }

org/apache/hadoop/crypto/key/KeyProvider.java
@@ -32,6 +32,7 @@ import java.util.Map;
 
 import com.google.gson.stream.JsonReader;
 import com.google.gson.stream.JsonWriter;
+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -207,7 +208,8 @@ public abstract class KeyProvider {
      */
     protected byte[] serialize() throws IOException {
       ByteArrayOutputStream buffer = new ByteArrayOutputStream();
-      JsonWriter writer = new JsonWriter(new OutputStreamWriter(buffer));
+      JsonWriter writer = new JsonWriter(
+          new OutputStreamWriter(buffer, Charsets.UTF_8));
       try {
         writer.beginObject();
         if (cipher != null) {
@@ -251,7 +253,7 @@ public abstract class KeyProvider {
       String description = null;
       Map<String, String> attributes = null;
       JsonReader reader = new JsonReader(new InputStreamReader
-          (new ByteArrayInputStream(bytes)));
+          (new ByteArrayInputStream(bytes), Charsets.UTF_8));
       try {
         reader.beginObject();
         while (reader.hasNext()) {
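
The serialize/deserialize pair above must agree on one encoding for the JSON bytes; naming UTF-8 on both the writer and the reader guarantees the round trip regardless of locale. A standalone sketch of the same pattern (hypothetical class, not Hadoop API):

    import java.io.*;
    import java.nio.charset.StandardCharsets;

    public class JsonBytesRoundTrip {
      static byte[] serialize(String json) throws IOException {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try (Writer writer = new OutputStreamWriter(buffer, StandardCharsets.UTF_8)) {
          writer.write(json); // same charset as the reader below
        }
        return buffer.toByteArray();
      }

      static String deserialize(byte[] bytes) throws IOException {
        StringBuilder sb = new StringBuilder();
        try (Reader reader = new InputStreamReader(
            new ByteArrayInputStream(bytes), StandardCharsets.UTF_8)) {
          for (int c = reader.read(); c != -1; c = reader.read()) {
            sb.append((char) c);
          }
        }
        return sb.toString();
      }

      public static void main(String[] args) throws IOException {
        System.out.println(deserialize(serialize("{\"cipher\":\"AES/CTR/NoPadding\"}")));
      }
    }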

org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.crypto.key.kms;
 
 import org.apache.commons.codec.binary.Base64;
+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.key.KeyProvider;
@@ -209,7 +210,7 @@ public class KMSClientProvider extends KeyProvider implements CryptoExtension,
   }
 
   private static void writeJson(Map map, OutputStream os) throws IOException {
-    Writer writer = new OutputStreamWriter(os);
+    Writer writer = new OutputStreamWriter(os, Charsets.UTF_8);
     ObjectMapper jsonMapper = new ObjectMapper();
     jsonMapper.writerWithDefaultPrettyPrinter().writeValue(writer, map);
   }

org/apache/hadoop/fs/shell/Display.java
@@ -33,6 +33,7 @@ import org.apache.avro.generic.GenericDatumWriter;
 import org.apache.avro.io.DatumWriter;
 import org.apache.avro.io.EncoderFactory;
 import org.apache.avro.io.JsonEncoder;
+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -236,10 +237,10 @@ class Display extends FsCommand {
       if (!r.next(key, val)) {
         return -1;
       }
-      byte[] tmp = key.toString().getBytes();
+      byte[] tmp = key.toString().getBytes(Charsets.UTF_8);
       outbuf.write(tmp, 0, tmp.length);
       outbuf.write('\t');
-      tmp = val.toString().getBytes();
+      tmp = val.toString().getBytes(Charsets.UTF_8);
       outbuf.write(tmp, 0, tmp.length);
       outbuf.write('\n');
       inbuf.reset(outbuf.getData(), outbuf.getLength());
@@ -301,7 +302,8 @@ class Display extends FsCommand {
       encoder.flush();
       if (!fileReader.hasNext()) {
         // Write a new line after the last Avro record.
-        output.write(System.getProperty("line.separator").getBytes());
+        output.write(System.getProperty("line.separator")
+            .getBytes(Charsets.UTF_8));
         output.flush();
       }
       pos = 0;

org/apache/hadoop/ha/StreamPumper.java
@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
 
+import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 
 /**
@@ -76,7 +77,8 @@ class StreamPumper {
   }
 
   protected void pump() throws IOException {
-    InputStreamReader inputStreamReader = new InputStreamReader(stream);
+    InputStreamReader inputStreamReader = new InputStreamReader(
+        stream, Charsets.UTF_8);
     BufferedReader br = new BufferedReader(inputStreamReader);
     String line = null;
     while ((line = br.readLine()) != null) {

org/apache/hadoop/http/HtmlQuoting.java
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.http;
 
+import org.apache.commons.io.Charsets;
+
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.OutputStream;
@@ -25,11 +27,11 @@ import java.io.OutputStream;
  * This class is responsible for quoting HTML characters.
  */
 public class HtmlQuoting {
-  private static final byte[] ampBytes = "&amp;".getBytes();
-  private static final byte[] aposBytes = "&apos;".getBytes();
-  private static final byte[] gtBytes = "&gt;".getBytes();
-  private static final byte[] ltBytes = "&lt;".getBytes();
-  private static final byte[] quotBytes = "&quot;".getBytes();
+  private static final byte[] ampBytes = "&amp;".getBytes(Charsets.UTF_8);
+  private static final byte[] aposBytes = "&apos;".getBytes(Charsets.UTF_8);
+  private static final byte[] gtBytes = "&gt;".getBytes(Charsets.UTF_8);
+  private static final byte[] ltBytes = "&lt;".getBytes(Charsets.UTF_8);
+  private static final byte[] quotBytes = "&quot;".getBytes(Charsets.UTF_8);
 
   /**
    * Does the given string need to be quoted?
@@ -63,7 +65,7 @@ public class HtmlQuoting {
     if (str == null) {
       return false;
     }
-    byte[] bytes = str.getBytes();
+    byte[] bytes = str.getBytes(Charsets.UTF_8);
     return needsQuoting(bytes, 0 , bytes.length);
   }
 
@@ -98,15 +100,16 @@ public class HtmlQuoting {
     if (item == null) {
       return null;
     }
-    byte[] bytes = item.getBytes();
+    byte[] bytes = item.getBytes(Charsets.UTF_8);
     if (needsQuoting(bytes, 0, bytes.length)) {
       ByteArrayOutputStream buffer = new ByteArrayOutputStream();
       try {
         quoteHtmlChars(buffer, bytes, 0, bytes.length);
+        return buffer.toString("UTF-8");
       } catch (IOException ioe) {
         // Won't happen, since it is a bytearrayoutputstream
         return null;
       }
-      return buffer.toString();
     } else {
       return item;
     }
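
Worth noting why HtmlQuoting's byte-level scan stays correct under UTF-8: every byte of a multi-byte UTF-8 sequence has the high bit set, so the ASCII metacharacters <, >, &, ', " can never appear as a fragment of an encoded non-ASCII character. A sketch of that property (demo class, not from the patch):

    import java.nio.charset.StandardCharsets;

    public class Utf8ScanDemo {
      // Safe under UTF-8: byte values below 0x80 only ever encode the ASCII char itself.
      static boolean needsQuoting(byte[] data) {
        for (byte b : data) {
          if (b == '<' || b == '>' || b == '&' || b == '\'' || b == '"') {
            return true;
          }
        }
        return false;
      }

      public static void main(String[] args) {
        System.out.println(needsQuoting("caf\u00e9".getBytes(StandardCharsets.UTF_8))); // false
        System.out.println(needsQuoting("a<b".getBytes(StandardCharsets.UTF_8)));       // true
      }
    }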

org/apache/hadoop/http/HttpServer.java
@@ -19,7 +19,7 @@ package org.apache.hadoop.http;
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
-import java.io.PrintWriter;
+import java.io.PrintStream;
 import java.net.BindException;
 import java.net.InetSocketAddress;
 import java.net.URL;
@@ -987,9 +987,10 @@ public class HttpServer implements FilterContainer {
         return;
       }
       response.setContentType("text/plain; charset=UTF-8");
-      PrintWriter out = response.getWriter();
-      ReflectionUtils.printThreadInfo(out, "");
-      out.close();
+      try (PrintStream out = new PrintStream(
+          response.getOutputStream(), false, "UTF-8")) {
+        ReflectionUtils.printThreadInfo(out, "");
+      }
       ReflectionUtils.logThreadInfo(LOG, "jsp requested", 1);
     }
   }

org/apache/hadoop/http/HttpServer2.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.http;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InterruptedIOException;
+import java.io.OutputStream;
+import java.io.PrintStream;
 import java.io.PrintWriter;
 import java.net.BindException;
 import java.net.InetSocketAddress;
@@ -1097,13 +1099,14 @@ public final class HttpServer2 implements FilterContainer {
     public void doGet(HttpServletRequest request, HttpServletResponse response)
       throws ServletException, IOException {
       if (!HttpServer2.isInstrumentationAccessAllowed(getServletContext(),
-                                                      request, response)) {
+          request, response)) {
         return;
       }
       response.setContentType("text/plain; charset=UTF-8");
-      PrintWriter out = response.getWriter();
-      ReflectionUtils.printThreadInfo(out, "");
-      out.close();
+      try (PrintStream out = new PrintStream(
+          response.getOutputStream(), false, "UTF-8")) {
+        ReflectionUtils.printThreadInfo(out, "");
+      }
       ReflectionUtils.logThreadInfo(LOG, "jsp requested", 1);
     }
   }
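
Both stack servlets now declare charset=UTF-8 in the Content-Type and write the body through a PrintStream constructed with that same encoding, with try-with-resources replacing the manual close. The shape of the idiom in isolation, using a ByteArrayOutputStream as a stand-in for the servlet output stream (demo only):

    import java.io.ByteArrayOutputStream;
    import java.io.PrintStream;
    import java.io.UnsupportedEncodingException;

    public class EncodedResponseDemo {
      public static void main(String[] args) throws UnsupportedEncodingException {
        ByteArrayOutputStream body = new ByteArrayOutputStream();
        // autoflush=false; the stream is flushed when the try block closes it
        try (PrintStream out = new PrintStream(body, false, "UTF-8")) {
          out.println("thread dump \u2026");
        }
        System.out.println(body.size() + " bytes, encoded as UTF-8");
      }
    }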

org/apache/hadoop/io/DefaultStringifier.java
@@ -23,6 +23,7 @@ import java.nio.charset.UnsupportedCharsetException;
 import java.util.ArrayList;
 
 import org.apache.commons.codec.binary.Base64;
+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -90,7 +91,7 @@ public class DefaultStringifier<T> implements Stringifier<T> {
     serializer.serialize(obj);
     byte[] buf = new byte[outBuf.getLength()];
     System.arraycopy(outBuf.getData(), 0, buf, 0, buf.length);
-    return new String(Base64.encodeBase64(buf));
+    return new String(Base64.encodeBase64(buf), Charsets.UTF_8);
   }
 
   @Override

org/apache/hadoop/io/SequenceFile.java
@@ -22,6 +22,8 @@ import java.io.*;
 import java.util.*;
 import java.rmi.server.UID;
 import java.security.MessageDigest;
+
+import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.*;
 import org.apache.hadoop.util.Options;
 import org.apache.hadoop.fs.*;
@@ -849,7 +851,7 @@ public class SequenceFile {
     try {
       MessageDigest digester = MessageDigest.getInstance("MD5");
       long time = Time.now();
-      digester.update((new UID()+"@"+time).getBytes());
+      digester.update((new UID()+"@"+time).getBytes(Charsets.UTF_8));
       sync = digester.digest();
     } catch (Exception e) {
       throw new RuntimeException(e);

org/apache/hadoop/io/compress/BZip2Codec.java
@@ -23,6 +23,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 
+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 
@@ -281,7 +282,7 @@ public class BZip2Codec implements Configurable, SplittableCompressionCodec {
       // The compressed bzip2 stream should start with the
      // identifying characters BZ. Caller of CBZip2OutputStream
       // i.e. this class must write these characters.
-      out.write(HEADER.getBytes());
+      out.write(HEADER.getBytes(Charsets.UTF_8));
     }
   }
 
@@ -415,7 +416,7 @@ public class BZip2Codec implements Configurable, SplittableCompressionCodec {
       byte[] headerBytes = new byte[HEADER_LEN];
       int actualRead = bufferedIn.read(headerBytes, 0, HEADER_LEN);
       if (actualRead != -1) {
-        String header = new String(headerBytes);
+        String header = new String(headerBytes, Charsets.UTF_8);
         if (header.compareTo(HEADER) != 0) {
           bufferedIn.reset();
         } else {

org/apache/hadoop/io/file/tfile/TFileDumper.java
@@ -24,6 +24,7 @@ import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.Set;
 
+import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -233,7 +234,7 @@ class TFileDumper {
             out.printf("%X", b);
           }
         } else {
-          out.print(new String(key, 0, sampleLen));
+          out.print(new String(key, 0, sampleLen, Charsets.UTF_8));
         }
         if (sampleLen < key.length) {
           out.print("...");

org/apache/hadoop/ipc/RpcConstants.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.ipc;
 
 import java.nio.ByteBuffer;
 
+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 
 @InterfaceAudience.Private
@@ -39,7 +40,8 @@ public class RpcConstants {
   /**
    * The first four bytes of Hadoop RPC connections
   */
-  public static final ByteBuffer HEADER = ByteBuffer.wrap("hrpc".getBytes());
+  public static final ByteBuffer HEADER = ByteBuffer.wrap("hrpc".getBytes
+      (Charsets.UTF_8));
 
   // 1 : Introduce ping and server does not throw away RPCs
   // 3 : Introduce the protocol into the RPC connection header

org/apache/hadoop/ipc/Server.java
@@ -68,6 +68,7 @@ import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslException;
 import javax.security.sasl.SaslServer;
 
+import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -179,7 +180,7 @@ public abstract class Server {
    * and send back a nicer response.
    */
   private static final ByteBuffer HTTP_GET_BYTES = ByteBuffer.wrap(
-      "GET ".getBytes());
+      "GET ".getBytes(Charsets.UTF_8));
 
   /**
    * An HTTP response to send back if we detect an HTTP request to our IPC
@@ -1638,7 +1639,7 @@ public abstract class Server {
     private void setupHttpRequestOnIpcPortResponse() throws IOException {
       Call fakeCall = new Call(0, RpcConstants.INVALID_RETRY_COUNT, null, this);
       fakeCall.setResponse(ByteBuffer.wrap(
-          RECEIVED_HTTP_REQ_RESPONSE.getBytes()));
+          RECEIVED_HTTP_REQ_RESPONSE.getBytes(Charsets.UTF_8)));
       responder.doRespond(fakeCall);
     }
 
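
For wire-protocol constants like "hrpc" and the canned HTTP response above, the bytes must be identical on every JVM that speaks the protocol; an explicit charset is what makes the constant actually constant. A minimal sketch (demo class, not Hadoop code):

    import java.nio.ByteBuffer;
    import java.nio.charset.StandardCharsets;

    public class MagicHeaderDemo {
      // Same four bytes on every platform, regardless of default charset.
      static final ByteBuffer HEADER =
          ByteBuffer.wrap("hrpc".getBytes(StandardCharsets.UTF_8));

      public static void main(String[] args) {
        System.out.println(HEADER.remaining()); // always 4
      }
    }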

org/apache/hadoop/log/LogLevel.java
@@ -24,6 +24,7 @@ import java.util.regex.Pattern;
 import javax.servlet.*;
 import javax.servlet.http.*;
 
+import com.google.common.base.Charsets;
 import org.apache.commons.logging.*;
 import org.apache.commons.logging.impl.*;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -66,7 +67,7 @@ public class LogLevel {
     connection.connect();
 
     BufferedReader in = new BufferedReader(new InputStreamReader(
-        connection.getInputStream()));
+        connection.getInputStream(), Charsets.UTF_8));
     for(String line; (line = in.readLine()) != null; )
       if (line.startsWith(MARKER)) {
        System.out.println(TAG.matcher(line).replaceAll(""));

org/apache/hadoop/metrics/ganglia/GangliaContext.java
@@ -29,6 +29,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
@@ -224,7 +225,7 @@ public class GangliaContext extends AbstractMetricsContext {
    * a multiple of 4.
    */
   protected void xdr_string(String s) {
-    byte[] bytes = s.getBytes();
+    byte[] bytes = s.getBytes(Charsets.UTF_8);
     int len = bytes.length;
     xdr_int(len);
     System.arraycopy(bytes, 0, buffer, offset, len);

org/apache/hadoop/metrics2/impl/MetricsConfig.java
@@ -272,13 +272,14 @@ class MetricsConfig extends SubsetConfiguration {
 
   static String toString(Configuration c) {
     ByteArrayOutputStream buffer = new ByteArrayOutputStream();
-    PrintStream ps = new PrintStream(buffer);
-    PropertiesConfiguration tmp = new PropertiesConfiguration();
-    tmp.copy(c);
-    try { tmp.save(ps); }
-    catch (Exception e) {
+    try {
+      PrintStream ps = new PrintStream(buffer, false, "UTF-8");
+      PropertiesConfiguration tmp = new PropertiesConfiguration();
+      tmp.copy(c);
+      tmp.save(ps);
+      return buffer.toString("UTF-8");
+    } catch (Exception e) {
       throw new MetricsConfigException(e);
     }
-    return buffer.toString();
   }
 }
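
The rewritten toString pairs the encode and decode steps: the PrintStream writes UTF-8 bytes into the buffer and buffer.toString("UTF-8") decodes them back, where the old buffer.toString() would have decoded with the platform default. The pairing in isolation (demo class):

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.PrintStream;

    public class BufferEncodingDemo {
      public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        PrintStream ps = new PrintStream(buffer, false, "UTF-8"); // encode as UTF-8
        ps.println("sink.file.name=caf\u00e9.log");
        ps.flush();
        System.out.print(buffer.toString("UTF-8"));               // decode as UTF-8
      }
    }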

org/apache/hadoop/metrics2/sink/FileSink.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.metrics2.sink;
 
 import java.io.Closeable;
 import java.io.File;
-import java.io.FileWriter;
+import java.io.FileOutputStream;
 import java.io.IOException;
-import java.io.PrintWriter;
+import java.io.PrintStream;
 
 import org.apache.commons.configuration.SubsetConfiguration;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -40,17 +40,16 @@ import org.apache.hadoop.metrics2.MetricsTag;
 @InterfaceStability.Evolving
 public class FileSink implements MetricsSink, Closeable {
   private static final String FILENAME_KEY = "filename";
-  private PrintWriter writer;
+  private PrintStream writer;
 
   @Override
   public void init(SubsetConfiguration conf) {
     String filename = conf.getString(FILENAME_KEY);
     try {
-      writer = filename == null
-          ? new PrintWriter(System.out)
-          : new PrintWriter(new FileWriter(new File(filename), true));
-    }
-    catch (Exception e) {
+      writer = filename == null ? System.out
+          : new PrintStream(new FileOutputStream(new File(filename)),
+                            true, "UTF-8");
+    } catch (Exception e) {
       throw new MetricsException("Error creating "+ filename, e);
     }
   }
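
FileWriter is dropped here because it offers no charset parameter at all; PrintStream over a FileOutputStream does. One subtlety visible in the hunk: the old code opened the file with append=true, while new FileOutputStream(new File(filename)) truncates, so callers relying on append semantics see a behavior change. A sketch that keeps both the explicit encoding and append mode (hypothetical file name):

    import java.io.File;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.PrintStream;

    public class FileSinkDemo {
      public static void main(String[] args) throws IOException {
        File file = new File("metrics.out");
        try (PrintStream ps = new PrintStream(
            new FileOutputStream(file, true), true, "UTF-8")) { // append, autoflush
          ps.println("jvm.metrics.memHeapUsedM=42");
        }
      }
    }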

org/apache/hadoop/metrics2/sink/GraphiteSink.java
@@ -25,6 +25,7 @@ import java.io.Closeable;
 import java.net.Socket;
 
 import org.apache.commons.configuration.SubsetConfiguration;
+import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -64,7 +65,8 @@ public class GraphiteSink implements MetricsSink, Closeable {
         try {
             // Open an connection to Graphite server.
             socket = new Socket(serverHost, serverPort);
-            writer = new OutputStreamWriter(socket.getOutputStream());
+            writer = new OutputStreamWriter(
+                socket.getOutputStream(), Charsets.UTF_8);
         } catch (Exception e) {
             throw new MetricsException("Error creating connection, "
                     + serverHost + ":" + serverPort, e);

org/apache/hadoop/metrics2/sink/ganglia/AbstractGangliaSink.java
@@ -29,6 +29,7 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.commons.configuration.SubsetConfiguration;
+import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.metrics2.MetricsSink;
@@ -221,7 +222,7 @@ public abstract class AbstractGangliaSink implements MetricsSink {
    * @param s the string to be written to buffer at offset location
    */
   protected void xdr_string(String s) {
-    byte[] bytes = s.getBytes();
+    byte[] bytes = s.getBytes(Charsets.UTF_8);
     int len = bytes.length;
     xdr_int(len);
     System.arraycopy(bytes, 0, buffer, offset, len);

org/apache/hadoop/net/TableMapping.java
@@ -20,13 +20,16 @@ package org.apache.hadoop.net;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.NET_TOPOLOGY_TABLE_MAPPING_FILE_KEY;
 
 import java.io.BufferedReader;
+import java.io.FileInputStream;
 import java.io.FileReader;
 import java.io.IOException;
+import java.io.InputStreamReader;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.commons.io.Charsets;
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -96,9 +99,10 @@ public class TableMapping extends CachedDNSToSwitchMapping {
       return null;
     }
 
-    BufferedReader reader = null;
-    try {
-      reader = new BufferedReader(new FileReader(filename));
+
+    try (BufferedReader reader =
+        new BufferedReader(new InputStreamReader(
+            new FileInputStream(filename), Charsets.UTF_8))) {
       String line = reader.readLine();
       while (line != null) {
         line = line.trim();
@@ -115,15 +119,6 @@ public class TableMapping extends CachedDNSToSwitchMapping {
     } catch (Exception e) {
       LOG.warn(filename + " cannot be read.", e);
       return null;
-    } finally {
-      if (reader != null) {
-        try {
-          reader.close();
-        } catch (IOException e) {
-          LOG.warn(filename + " cannot be read.", e);
-          return null;
-        }
-      }
     }
     return loadMap;
   }
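
try-with-resources subsumes the removed finally block: the reader is closed on every exit path, and a failure to close no longer needs its own logging and early return. The same shape reduced to essentials (demo class; reads the file named by its first argument):

    import java.io.BufferedReader;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStreamReader;
    import java.nio.charset.StandardCharsets;

    public class ReadTableDemo {
      public static void main(String[] args) throws IOException {
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(
            new FileInputStream(args[0]), StandardCharsets.UTF_8))) {
          for (String line; (line = reader.readLine()) != null; ) {
            System.out.println(line.trim()); // one mapping entry per line
          }
        } // reader closed here even if readLine throws
      }
    }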

org/apache/hadoop/security/AuthenticationFilterInitializer.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.security;
 
+import com.google.common.base.Charsets;
 import org.apache.hadoop.http.HttpServer2;
 import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
 import org.apache.hadoop.conf.Configuration;
@@ -24,8 +25,10 @@ import org.apache.hadoop.http.FilterContainer;
 import org.apache.hadoop.http.FilterInitializer;
 import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
 
+import java.io.FileInputStream;
 import java.io.FileReader;
 import java.io.IOException;
+import java.io.InputStreamReader;
 import java.io.Reader;
 import java.util.HashMap;
 import java.util.Map;
@@ -78,10 +81,10 @@ public class AuthenticationFilterInitializer extends FilterInitializer {
       if (signatureSecretFile == null) {
         throw new RuntimeException("Undefined property: " + SIGNATURE_SECRET_FILE);
       }
-
-      try {
-        StringBuilder secret = new StringBuilder();
-        Reader reader = new FileReader(signatureSecretFile);
+
+      StringBuilder secret = new StringBuilder();
+      try (Reader reader = new InputStreamReader(
+          new FileInputStream(signatureSecretFile), Charsets.UTF_8)) {
        int c = reader.read();
         while (c > -1) {
           secret.append((char)c);

org/apache/hadoop/security/Credentials.java
@@ -33,6 +33,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 
+import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -218,7 +219,8 @@ public class Credentials implements Writable {
     readFields(in);
   }
 
-  private static final byte[] TOKEN_STORAGE_MAGIC = "HDTS".getBytes();
+  private static final byte[] TOKEN_STORAGE_MAGIC =
+      "HDTS".getBytes(Charsets.UTF_8);
   private static final byte TOKEN_STORAGE_VERSION = 0;
 
   public void writeTokenStorageToStream(DataOutputStream os)

org/apache/hadoop/security/LdapGroupsMapping.java
@@ -17,8 +17,10 @@
  */
 package org.apache.hadoop.security;
 
+import java.io.FileInputStream;
 import java.io.FileReader;
 import java.io.IOException;
+import java.io.InputStreamReader;
 import java.io.Reader;
 import java.util.ArrayList;
 import java.util.Hashtable;
@@ -34,6 +36,7 @@ import javax.naming.directory.InitialDirContext;
 import javax.naming.directory.SearchControls;
 import javax.naming.directory.SearchResult;
 
+import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -366,9 +369,10 @@ public class LdapGroupsMapping
       // an anonymous bind
       return "";
     }
-
-    try (Reader reader = new FileReader(pwFile)) {
-      StringBuilder password = new StringBuilder();
+
+    StringBuilder password = new StringBuilder();
+    try (Reader reader = new InputStreamReader(
+        new FileInputStream(pwFile), Charsets.UTF_8)) {
       int c = reader.read();
       while (c > -1) {
         password.append((char)c);

org/apache/hadoop/security/SaslRpcServer.java
@@ -44,6 +44,7 @@ import javax.security.sasl.SaslServer;
 import javax.security.sasl.SaslServerFactory;
 
 import org.apache.commons.codec.binary.Base64;
+import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -184,11 +185,11 @@ public class SaslRpcServer {
   }
 
   static String encodeIdentifier(byte[] identifier) {
-    return new String(Base64.encodeBase64(identifier));
+    return new String(Base64.encodeBase64(identifier), Charsets.UTF_8);
   }
 
   static byte[] decodeIdentifier(String identifier) {
-    return Base64.decodeBase64(identifier.getBytes());
+    return Base64.decodeBase64(identifier.getBytes(Charsets.UTF_8));
   }
 
   public static <T extends TokenIdentifier> T getIdentifier(String id,
@@ -206,7 +207,8 @@ public class SaslRpcServer {
   }
 
   static char[] encodePassword(byte[] password) {
-    return new String(Base64.encodeBase64(password)).toCharArray();
+    return new String(Base64.encodeBase64(password),
+                      Charsets.UTF_8).toCharArray();
   }
 
   /** Splitting fully qualified Kerberos name into parts */
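
Base64 text is pure ASCII, so these String round trips only go wrong on platforms whose default charset is not ASCII-compatible (EBCDIC locales being the classic case); pinning UTF-8 removes that dependency entirely. The round trip in isolation (demo class, using the same commons-codec Base64 the patch uses):

    import java.nio.charset.StandardCharsets;
    import java.util.Arrays;
    import org.apache.commons.codec.binary.Base64;

    public class Base64RoundTripDemo {
      public static void main(String[] args) {
        byte[] identifier = {0x01, (byte) 0xfe, 0x42};
        String encoded =
            new String(Base64.encodeBase64(identifier), StandardCharsets.UTF_8);
        byte[] decoded =
            Base64.decodeBase64(encoded.getBytes(StandardCharsets.UTF_8));
        System.out.println(Arrays.equals(identifier, decoded)); // true
      }
    }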

org/apache/hadoop/security/ShellBasedIdMapping.java
@@ -22,11 +22,13 @@ import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
+import java.nio.charset.Charset;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -217,7 +219,9 @@ public class ShellBasedIdMapping implements IdMappingServiceProvider {
     try {
       Process process = Runtime.getRuntime().exec(
           new String[] { "bash", "-c", command });
-      br = new BufferedReader(new InputStreamReader(process.getInputStream()));
+      br = new BufferedReader(
+          new InputStreamReader(process.getInputStream(),
+                                Charset.defaultCharset()));
       String line = null;
       while ((line = br.readLine()) != null) {
         String[] nameId = line.split(regex);
@@ -552,7 +556,7 @@ public class ShellBasedIdMapping implements IdMappingServiceProvider {
     Map<Integer, Integer> gidMapping = new HashMap<Integer, Integer>();
 
     BufferedReader in = new BufferedReader(new InputStreamReader(
-        new FileInputStream(staticMapFile)));
+        new FileInputStream(staticMapFile), Charsets.UTF_8));
 
     try {
       String line = null;

org/apache/hadoop/security/alias/JavaKeyStoreProvider.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.security.alias;
 
+import org.apache.commons.io.Charsets;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
@@ -165,7 +166,7 @@ public class JavaKeyStoreProvider extends CredentialProvider {
   }
 
   public static char[] bytesToChars(byte[] bytes) {
-    String pass = new String(bytes);
+    String pass = new String(bytes, Charsets.UTF_8);
     return pass.toCharArray();
   }
 

org/apache/hadoop/security/alias/UserProvider.java
@@ -23,6 +23,7 @@ import java.net.URI;
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
@@ -56,7 +57,8 @@ public class UserProvider extends CredentialProvider {
     if (bytes == null) {
       return null;
     }
-    return new CredentialEntry(alias, new String(bytes).toCharArray());
+    return new CredentialEntry(
+        alias, new String(bytes, Charsets.UTF_8).toCharArray());
   }
 
   @Override

org/apache/hadoop/tracing/SpanReceiverHost.java
@@ -31,6 +31,7 @@ import java.util.Map;
 import java.util.TreeMap;
 import java.util.UUID;
 
+import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -100,7 +101,8 @@ public class SpanReceiverHost implements TraceAdminProtocol {
       // out of /proc/self/stat. (There isn't any portable way to get the
       // process ID from Java.)
       reader = new BufferedReader(
-          new InputStreamReader(new FileInputStream("/proc/self/stat")));
+          new InputStreamReader(new FileInputStream("/proc/self/stat"),
+              Charsets.UTF_8));
       String line = reader.readLine();
       if (line == null) {
         throw new EOFException();

org/apache/hadoop/tracing/TraceAdmin.java
@@ -25,6 +25,7 @@ import java.util.Arrays;
 import java.util.LinkedList;
 import java.util.List;
 
+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
@@ -91,7 +92,7 @@ public class TraceAdmin extends Configured implements Tool {
       return 1;
     }
     ByteArrayOutputStream configStream = new ByteArrayOutputStream();
-    PrintStream configsOut = new PrintStream(configStream);
+    PrintStream configsOut = new PrintStream(configStream, false, "UTF-8");
     SpanReceiverInfoBuilder factory = new SpanReceiverInfoBuilder(className);
     String prefix = "";
     for (int i = 0; i < args.size(); ++i) {
@@ -113,13 +114,15 @@ public class TraceAdmin extends Configured implements Tool {
       configsOut.print(prefix + key + " = " + value);
       prefix = ", ";
     }
+
+    String configStreamStr = configStream.toString("UTF-8");
     try {
       long id = remote.addSpanReceiver(factory.build());
       System.out.println("Added trace span receiver " + id +
-          " with configuration " + configStream.toString());
+          " with configuration " + configStreamStr);
     } catch (IOException e) {
       System.out.println("addSpanReceiver error with configuration " +
-          configStream.toString());
+          configStreamStr);
       throw e;
     }
     return 0;

org/apache/hadoop/util/FileBasedIPList.java
@@ -19,13 +19,18 @@ package org.apache.hadoop.util;
 
 import java.io.BufferedReader;
 import java.io.File;
+import java.io.FileInputStream;
 import java.io.FileReader;
 import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.Reader;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashSet;
 import java.util.List;
 
+import org.apache.commons.io.Charsets;
+import org.apache.commons.io.IOUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
@@ -78,7 +83,8 @@ public class FileBasedIPList implements IPList {
     if (fileName != null) {
       File file = new File (fileName);
       if (file.exists()) {
-        FileReader fileReader = new FileReader(file);
+        Reader fileReader = new InputStreamReader(
+            new FileInputStream(file), Charsets.UTF_8);
         BufferedReader bufferedReader = new BufferedReader(fileReader);
         List<String> lines = new ArrayList<String>();
         String line = null;

org/apache/hadoop/util/HostsFileReader.java
@@ -22,6 +22,7 @@ import java.io.*;
 import java.util.Set;
 import java.util.HashSet;
 
+import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.LogFactory;
 import org.apache.commons.logging.Log;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -72,7 +73,8 @@ public class HostsFileReader {
       throws IOException {
     BufferedReader reader = null;
     try {
-      reader = new BufferedReader(new InputStreamReader(fileInputStream));
+      reader = new BufferedReader(
+          new InputStreamReader(fileInputStream, Charsets.UTF_8));
       String line;
       while ((line = reader.readLine()) != null) {
         String[] nodes = line.split("[ \t\n\f\r]+");

org/apache/hadoop/util/ReflectionUtils.java
@@ -20,13 +20,16 @@ package org.apache.hadoop.util;
 
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
+import java.io.PrintStream;
 import java.io.PrintWriter;
+import java.io.UnsupportedEncodingException;
 import java.lang.management.ManagementFactory;
 import java.lang.management.ThreadInfo;
 import java.lang.management.ThreadMXBean;
 import java.lang.reflect.Constructor;
 import java.lang.reflect.Field;
 import java.lang.reflect.Method;
+import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
@@ -154,7 +157,7 @@ public class ReflectionUtils {
    * @param stream the stream to
    * @param title a string title for the stack trace
    */
-  public synchronized static void printThreadInfo(PrintWriter stream,
+  public synchronized static void printThreadInfo(PrintStream stream,
                                      String title) {
     final int STACK_DEPTH = 20;
     boolean contention = threadBean.isThreadContentionMonitoringEnabled();
@@ -215,9 +218,12 @@ public class ReflectionUtils {
         }
       }
       if (dumpStack) {
-        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
-        printThreadInfo(new PrintWriter(buffer), title);
-        log.info(buffer.toString());
+        try {
+          ByteArrayOutputStream buffer = new ByteArrayOutputStream();
+          printThreadInfo(new PrintStream(buffer, false, "UTF-8"), title);
+          log.info(buffer.toString(Charset.defaultCharset().name()));
+        } catch (UnsupportedEncodingException ignored) {
+        }
       }
     }
   }

org/apache/hadoop/util/Shell.java
@@ -22,6 +22,7 @@ import java.io.File;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.InputStream;
+import java.nio.charset.Charset;
 import java.util.Arrays;
 import java.util.Map;
 import java.util.Timer;
@@ -493,11 +494,11 @@ abstract public class Shell {
       timeOutTimer.schedule(timeoutTimerTask, timeOutInterval);
     }
     final BufferedReader errReader =
-            new BufferedReader(new InputStreamReader(process
-                                                     .getErrorStream()));
+            new BufferedReader(new InputStreamReader(
+                process.getErrorStream(), Charset.defaultCharset()));
     BufferedReader inReader =
-            new BufferedReader(new InputStreamReader(process
-                                                     .getInputStream()));
+            new BufferedReader(new InputStreamReader(
+                process.getInputStream(), Charset.defaultCharset()));
     final StringBuffer errMsg = new StringBuffer();
 
     // read error and input streams as this would free up the buffers
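
Shell and ShellBasedIdMapping are the deliberate exceptions in this patch: a forked process writes its output in the operating system's locale encoding, not necessarily UTF-8, so there Charset.defaultCharset() is the correct decoder and the commit merely spells the choice out. The pattern on its own (demo class; assumes a Unix ls is on the PATH):

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.io.InputStreamReader;
    import java.nio.charset.Charset;

    public class SubprocessReadDemo {
      public static void main(String[] args) throws IOException {
        Process p = Runtime.getRuntime().exec(new String[] {"ls", "-l"});
        // The child writes in the OS locale encoding, so the default
        // charset is intentionally the right decoder here.
        try (BufferedReader r = new BufferedReader(new InputStreamReader(
            p.getInputStream(), Charset.defaultCharset()))) {
          for (String line; (line = r.readLine()) != null; ) {
            System.out.println(line);
          }
        }
      }
    }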

org/apache/hadoop/hdfs/TestDataTransferKeepalive.java
@@ -245,9 +245,7 @@ public class TestDataTransferKeepalive {
   private void assertXceiverCount(int expected) {
     int count = getXceiverCountWithoutServer();
     if (count != expected) {
-      ReflectionUtils.printThreadInfo(
-          new PrintWriter(System.err),
-          "Thread dumps");
+      ReflectionUtils.printThreadInfo(System.err, "Thread dumps");
       fail("Expected " + expected + " xceivers, found " +
           count);
     }