HADOOP-11389. Clean up byte to string encoding issues in hadoop-common. Contributed by Haohui Mai.
parent f6f2a3f1c7
commit 5b9fcedb4d
@@ -574,6 +574,9 @@ Release 2.7.0 - UNRELEASED

     HADOOP-11211. mapreduce.job.classloader.system.classes semantics should be
    be order-independent. (Yitong Zhou via gera)

+    HADOOP-11389. Clean up byte to string encoding issues in hadoop-common.
+    (wheat9)
+
 Release 2.6.0 - 2014-11-18

@@ -67,6 +67,7 @@ import javax.xml.transform.TransformerFactory;
 import javax.xml.transform.dom.DOMSource;
 import javax.xml.transform.stream.StreamResult;

+import com.google.common.base.Charsets;
 import org.apache.commons.collections.map.UnmodifiableMap;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -2263,7 +2264,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
         LOG.info("found resource " + name + " at " + url);
       }

-      return new InputStreamReader(url.openStream());
+      return new InputStreamReader(url.openStream(), Charsets.UTF_8);
     } catch (Exception e) {
       return null;
     }
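The change above is the template for the whole commit: every conversion between bytes and characters gets an explicit charset, so behavior stops depending on the JVM's file.encoding. A minimal standalone sketch of the failure mode (not part of the commit; it uses java.nio.charset.StandardCharsets rather than the Guava Charsets class imported above, and the class name is invented for illustration):

```java
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.StandardCharsets;

public class CharsetPitfall {
  public static void main(String[] args) throws IOException {
    byte[] utf8 = "café".getBytes(StandardCharsets.UTF_8);

    // One-argument constructor decodes with the platform default charset;
    // on a windows-1252 JVM the two bytes of 'é' are silently mangled.
    Reader risky = new InputStreamReader(new ByteArrayInputStream(utf8));
    risky.close();

    // Two-argument constructor decodes the same bytes on every platform.
    Reader safe = new InputStreamReader(
        new ByteArrayInputStream(utf8), StandardCharsets.UTF_8);
    StringBuilder sb = new StringBuilder();
    for (int c = safe.read(); c != -1; c = safe.read()) {
      sb.append((char) c);
    }
    safe.close();
    System.out.println(sb);  // "café", regardless of file.encoding
  }
}
```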
@@ -32,6 +32,7 @@ import java.util.Map;

 import com.google.gson.stream.JsonReader;
 import com.google.gson.stream.JsonWriter;
+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -207,7 +208,8 @@ public abstract class KeyProvider {
    */
   protected byte[] serialize() throws IOException {
     ByteArrayOutputStream buffer = new ByteArrayOutputStream();
-    JsonWriter writer = new JsonWriter(new OutputStreamWriter(buffer));
+    JsonWriter writer = new JsonWriter(
+        new OutputStreamWriter(buffer, Charsets.UTF_8));
     try {
       writer.beginObject();
       if (cipher != null) {
@@ -251,7 +253,7 @@ public abstract class KeyProvider {
     String description = null;
     Map<String, String> attributes = null;
     JsonReader reader = new JsonReader(new InputStreamReader
-        (new ByteArrayInputStream(bytes)));
+        (new ByteArrayInputStream(bytes), Charsets.UTF_8));
     try {
       reader.beginObject();
       while (reader.hasNext()) {
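serialize() and its matching parser must agree on the encoding, or key metadata written on one node could fail to parse on another whose default charset differs. A self-contained round trip of the same pattern (a sketch, not the KeyProvider code: class name and JSON field are made up, and StandardCharsets stands in for commons-io Charsets):

```java
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;

public class JsonUtf8RoundTrip {
  static byte[] serialize(String name) throws IOException {
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    // Writer and reader below name the same charset, so the byte[] form
    // is portable between JVMs with different defaults.
    JsonWriter writer = new JsonWriter(
        new OutputStreamWriter(buffer, StandardCharsets.UTF_8));
    writer.beginObject();
    writer.name("name").value(name);
    writer.endObject();
    writer.close();
    return buffer.toByteArray();
  }

  static String deserialize(byte[] bytes) throws IOException {
    JsonReader reader = new JsonReader(new InputStreamReader(
        new ByteArrayInputStream(bytes), StandardCharsets.UTF_8));
    reader.beginObject();
    reader.nextName();
    String value = reader.nextString();
    reader.endObject();
    reader.close();
    return value;
  }

  public static void main(String[] args) throws IOException {
    System.out.println(deserialize(serialize("clé-UTF8")));
  }
}
```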
@@ -18,6 +18,7 @@
 package org.apache.hadoop.crypto.key.kms;

 import org.apache.commons.codec.binary.Base64;
+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.key.KeyProvider;
@@ -209,7 +210,7 @@ public class KMSClientProvider extends KeyProvider implements CryptoExtension,
   }

   private static void writeJson(Map map, OutputStream os) throws IOException {
-    Writer writer = new OutputStreamWriter(os);
+    Writer writer = new OutputStreamWriter(os, Charsets.UTF_8);
     ObjectMapper jsonMapper = new ObjectMapper();
     jsonMapper.writerWithDefaultPrettyPrinter().writeValue(writer, map);
   }
@@ -32,6 +32,7 @@ import org.apache.avro.generic.GenericDatumWriter;
 import org.apache.avro.io.DatumWriter;
 import org.apache.avro.io.EncoderFactory;
 import org.apache.avro.io.JsonEncoder;
+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -234,10 +235,10 @@ class Display extends FsCommand {
       if (!r.next(key, val)) {
         return -1;
       }
-      byte[] tmp = key.toString().getBytes();
+      byte[] tmp = key.toString().getBytes(Charsets.UTF_8);
       outbuf.write(tmp, 0, tmp.length);
       outbuf.write('\t');
-      tmp = val.toString().getBytes();
+      tmp = val.toString().getBytes(Charsets.UTF_8);
       outbuf.write(tmp, 0, tmp.length);
       outbuf.write('\n');
       inbuf.reset(outbuf.getData(), outbuf.getLength());
@@ -299,7 +300,8 @@ class Display extends FsCommand {
         encoder.flush();
         if (!fileReader.hasNext()) {
           // Write a new line after the last Avro record.
-          output.write(System.getProperty("line.separator").getBytes());
+          output.write(System.getProperty("line.separator")
+              .getBytes(Charsets.UTF_8));
           output.flush();
         }
         pos = 0;
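String.getBytes() with no argument also picks up file.encoding, so even the byte count of the output can vary between JVMs, which matters when the bytes are written into length-tracked buffers as above. A tiny illustration (hypothetical class; StandardCharsets used for portability):

```java
import java.nio.charset.StandardCharsets;
import java.util.Arrays;

public class GetBytesDemo {
  public static void main(String[] args) {
    String s = "naïve";
    // Default-charset encoding: 5 bytes on a latin-1 JVM, 6 on a UTF-8 one,
    // so the record lengths written downstream would differ by platform.
    byte[] platform = s.getBytes();
    // Explicit encoding: always the same 6 bytes.
    byte[] utf8 = s.getBytes(StandardCharsets.UTF_8);
    System.out.println(platform.length + " vs " + utf8.length);
    System.out.println(Arrays.toString(utf8));
  }
}
```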
@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;

+import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;

 /**
@@ -76,7 +77,8 @@ class StreamPumper {
   }

   protected void pump() throws IOException {
-    InputStreamReader inputStreamReader = new InputStreamReader(stream);
+    InputStreamReader inputStreamReader = new InputStreamReader(
+        stream, Charsets.UTF_8);
     BufferedReader br = new BufferedReader(inputStreamReader);
     String line = null;
     while ((line = br.readLine()) != null) {
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.http;

+import org.apache.commons.io.Charsets;
+
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.OutputStream;
@@ -25,11 +27,11 @@ import java.io.OutputStream;
  * This class is responsible for quoting HTML characters.
  */
 public class HtmlQuoting {
-  private static final byte[] ampBytes = "&amp;".getBytes();
-  private static final byte[] aposBytes = "&apos;".getBytes();
-  private static final byte[] gtBytes = "&gt;".getBytes();
-  private static final byte[] ltBytes = "&lt;".getBytes();
-  private static final byte[] quotBytes = "&quot;".getBytes();
+  private static final byte[] ampBytes = "&amp;".getBytes(Charsets.UTF_8);
+  private static final byte[] aposBytes = "&apos;".getBytes(Charsets.UTF_8);
+  private static final byte[] gtBytes = "&gt;".getBytes(Charsets.UTF_8);
+  private static final byte[] ltBytes = "&lt;".getBytes(Charsets.UTF_8);
+  private static final byte[] quotBytes = "&quot;".getBytes(Charsets.UTF_8);

   /**
    * Does the given string need to be quoted?
@@ -63,7 +65,7 @@ public class HtmlQuoting {
     if (str == null) {
       return false;
     }
-    byte[] bytes = str.getBytes();
+    byte[] bytes = str.getBytes(Charsets.UTF_8);
     return needsQuoting(bytes, 0 , bytes.length);
   }

@@ -98,15 +100,16 @@ public class HtmlQuoting {
     if (item == null) {
       return null;
     }
-    byte[] bytes = item.getBytes();
+    byte[] bytes = item.getBytes(Charsets.UTF_8);
     if (needsQuoting(bytes, 0, bytes.length)) {
       ByteArrayOutputStream buffer = new ByteArrayOutputStream();
       try {
         quoteHtmlChars(buffer, bytes, 0, bytes.length);
+        return buffer.toString("UTF-8");
       } catch (IOException ioe) {
         // Won't happen, since it is a bytearrayoutputstream
+        return null;
       }
-      return buffer.toString();
     } else {
       return item;
     }
@@ -20,6 +20,8 @@ package org.apache.hadoop.http;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InterruptedIOException;
+import java.io.OutputStream;
+import java.io.PrintStream;
 import java.io.PrintWriter;
 import java.net.BindException;
 import java.net.InetSocketAddress;
@@ -1065,13 +1067,14 @@ public final class HttpServer2 implements FilterContainer {
     public void doGet(HttpServletRequest request, HttpServletResponse response)
       throws ServletException, IOException {
       if (!HttpServer2.isInstrumentationAccessAllowed(getServletContext(),
                                                       request, response)) {
         return;
       }
       response.setContentType("text/plain; charset=UTF-8");
-      PrintWriter out = response.getWriter();
-      ReflectionUtils.printThreadInfo(out, "");
-      out.close();
+      try (PrintStream out = new PrintStream(
+          response.getOutputStream(), false, "UTF-8")) {
+        ReflectionUtils.printThreadInfo(out, "");
+      }
       ReflectionUtils.logThreadInfo(LOG, "jsp requested", 1);
     }
   }
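Besides pinning the stream encoding to match the declared Content-Type, the rewrite above switches to try-with-resources so the stream is closed on every exit path, including when printThreadInfo throws. A reduced sketch of that construct (names are placeholders, not the servlet's actual fields):

```java
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;

public class TryWithResourcesDump {
  static void dump(OutputStream target) throws IOException {
    // PrintStream with an explicit encoding; autoflush is off (false)
    // because close() at the end of the try block flushes anyway.
    try (PrintStream out = new PrintStream(target, false, "UTF-8")) {
      out.println("thread dump would go here");
    } // closed and flushed here, even on an exception
  }

  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream sink = new ByteArrayOutputStream();
    dump(sink);
    System.out.print(sink.toString("UTF-8"));
  }
}
```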
@@ -23,6 +23,7 @@ import java.nio.charset.UnsupportedCharsetException;
 import java.util.ArrayList;

 import org.apache.commons.codec.binary.Base64;
+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -90,7 +91,7 @@ public class DefaultStringifier<T> implements Stringifier<T> {
     serializer.serialize(obj);
     byte[] buf = new byte[outBuf.getLength()];
     System.arraycopy(outBuf.getData(), 0, buf, 0, buf.length);
-    return new String(Base64.encodeBase64(buf));
+    return new String(Base64.encodeBase64(buf), Charsets.UTF_8);
   }

   @Override
@@ -22,6 +22,8 @@ import java.io.*;
 import java.util.*;
 import java.rmi.server.UID;
 import java.security.MessageDigest;
+
+import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.*;
 import org.apache.hadoop.util.Options;
 import org.apache.hadoop.fs.*;
@@ -849,7 +851,7 @@ public class SequenceFile {
     try {
       MessageDigest digester = MessageDigest.getInstance("MD5");
       long time = Time.now();
-      digester.update((new UID()+"@"+time).getBytes());
+      digester.update((new UID()+"@"+time).getBytes(Charsets.UTF_8));
       sync = digester.digest();
     } catch (Exception e) {
       throw new RuntimeException(e);
@@ -23,6 +23,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;

+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;

@@ -281,7 +282,7 @@ public class BZip2Codec implements Configurable, SplittableCompressionCodec {
       // The compressed bzip2 stream should start with the
       // identifying characters BZ. Caller of CBZip2OutputStream
       // i.e. this class must write these characters.
-      out.write(HEADER.getBytes());
+      out.write(HEADER.getBytes(Charsets.UTF_8));
     }
   }

@@ -415,7 +416,7 @@ public class BZip2Codec implements Configurable, SplittableCompressionCodec {
     byte[] headerBytes = new byte[HEADER_LEN];
     int actualRead = bufferedIn.read(headerBytes, 0, HEADER_LEN);
     if (actualRead != -1) {
-      String header = new String(headerBytes);
+      String header = new String(headerBytes, Charsets.UTF_8);
       if (header.compareTo(HEADER) != 0) {
         bufferedIn.reset();
       } else {
@@ -24,6 +24,7 @@ import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.Set;

+import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -233,7 +234,7 @@ class TFileDumper {
             out.printf("%X", b);
           }
         } else {
-          out.print(new String(key, 0, sampleLen));
+          out.print(new String(key, 0, sampleLen, Charsets.UTF_8));
         }
         if (sampleLen < key.length) {
           out.print("...");
@@ -19,6 +19,7 @@ package org.apache.hadoop.ipc;

 import java.nio.ByteBuffer;

+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;

 @InterfaceAudience.Private
@@ -53,7 +54,8 @@ public class RpcConstants {
   /**
    * The first four bytes of Hadoop RPC connections
    */
-  public static final ByteBuffer HEADER = ByteBuffer.wrap("hrpc".getBytes());
+  public static final ByteBuffer HEADER = ByteBuffer.wrap("hrpc".getBytes
+      (Charsets.UTF_8));
   public static final int HEADER_LEN_AFTER_HRPC_PART = 3; // 3 bytes that follow

   // 1 : Introduce ping and server does not throw away RPCs
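For a wire-protocol constant like the "hrpc" preamble, the encoding determines the exact bytes every client and server must emit; leaving it to the platform default would let a single oddly-configured JVM (for instance one with an EBCDIC default charset) break connection setup. A sketch of the idea (hypothetical class; StandardCharsets stands in for the commons-io Charsets used above):

```java
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

public class RpcHeaderDemo {
  // Same shape as RpcConstants.HEADER: four fixed bytes, 0x68 0x72 0x70 0x63,
  // identical on every JVM because the charset is named explicitly.
  static final ByteBuffer HEADER =
      ByteBuffer.wrap("hrpc".getBytes(StandardCharsets.UTF_8));

  public static void main(String[] args) {
    for (int i = 0; i < HEADER.remaining(); i++) {
      System.out.printf("0x%02x ", HEADER.get(i));
    }
    System.out.println();
  }
}
```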
@@ -69,6 +69,7 @@ import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslException;
 import javax.security.sasl.SaslServer;

+import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -182,7 +183,7 @@ public abstract class Server {
    * and send back a nicer response.
    */
   private static final ByteBuffer HTTP_GET_BYTES = ByteBuffer.wrap(
-      "GET ".getBytes());
+      "GET ".getBytes(Charsets.UTF_8));

   /**
    * An HTTP response to send back if we detect an HTTP request to our IPC
@@ -1709,7 +1710,7 @@ public abstract class Server {
   private void setupHttpRequestOnIpcPortResponse() throws IOException {
     Call fakeCall = new Call(0, RpcConstants.INVALID_RETRY_COUNT, null, this);
     fakeCall.setResponse(ByteBuffer.wrap(
-        RECEIVED_HTTP_REQ_RESPONSE.getBytes()));
+        RECEIVED_HTTP_REQ_RESPONSE.getBytes(Charsets.UTF_8)));
     responder.doRespond(fakeCall);
   }
@@ -24,6 +24,7 @@ import java.util.regex.Pattern;
 import javax.servlet.*;
 import javax.servlet.http.*;

+import com.google.common.base.Charsets;
 import org.apache.commons.logging.*;
 import org.apache.commons.logging.impl.*;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -66,7 +67,7 @@ public class LogLevel {
       connection.connect();

       BufferedReader in = new BufferedReader(new InputStreamReader(
-          connection.getInputStream()));
+          connection.getInputStream(), Charsets.UTF_8));
       for(String line; (line = in.readLine()) != null; )
         if (line.startsWith(MARKER)) {
           System.out.println(TAG.matcher(line).replaceAll(""));
@@ -29,6 +29,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;

+import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;

@@ -225,7 +226,7 @@ public class GangliaContext extends AbstractMetricsContext {
    * a multiple of 4.
    */
   protected void xdr_string(String s) {
-    byte[] bytes = s.getBytes();
+    byte[] bytes = s.getBytes(Charsets.UTF_8);
     int len = bytes.length;
     xdr_int(len);
     System.arraycopy(bytes, 0, buffer, offset, len);
@@ -269,14 +269,14 @@ class MetricsConfig extends SubsetConfiguration {

   static String toString(Configuration c) {
     ByteArrayOutputStream buffer = new ByteArrayOutputStream();
-    PrintStream ps = new PrintStream(buffer);
-    PropertiesConfiguration tmp = new PropertiesConfiguration();
-    tmp.copy(c);
     try {
+      PrintStream ps = new PrintStream(buffer, false, "UTF-8");
+      PropertiesConfiguration tmp = new PropertiesConfiguration();
+      tmp.copy(c);
       tmp.save(ps);
+      return buffer.toString("UTF-8");
     } catch (Exception e) {
       throw new MetricsConfigException(e);
     }
-    return buffer.toString();
   }
 }
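The fix here is symmetric: the PrintStream that fills the buffer and the toString that drains it now name the same encoding, where before both ends used whatever the platform default happened to be. The shape of the pattern (a hypothetical standalone class, not the MetricsConfig code):

```java
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;

public class SymmetricEncoding {
  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    // Encode and decode with the same named charset; the round trip is
    // lossless no matter what file.encoding is set to.
    PrintStream ps = new PrintStream(buffer, false, "UTF-8");
    ps.println("métrique.sink.file.filename = test.log");
    ps.flush();
    System.out.print(buffer.toString("UTF-8"));
  }
}
```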
@@ -20,9 +20,9 @@ package org.apache.hadoop.metrics2.sink;

 import java.io.Closeable;
 import java.io.File;
-import java.io.FileWriter;
+import java.io.FileOutputStream;
 import java.io.IOException;
-import java.io.PrintWriter;
+import java.io.PrintStream;

 import org.apache.commons.configuration.SubsetConfiguration;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -40,15 +40,15 @@ import org.apache.hadoop.metrics2.MetricsTag;
 @InterfaceStability.Evolving
 public class FileSink implements MetricsSink, Closeable {
   private static final String FILENAME_KEY = "filename";
-  private PrintWriter writer;
+  private PrintStream writer;

   @Override
   public void init(SubsetConfiguration conf) {
     String filename = conf.getString(FILENAME_KEY);
     try {
-      writer = filename == null
-          ? new PrintWriter(System.out)
-          : new PrintWriter(new FileWriter(new File(filename), true));
+      writer = filename == null ? System.out
+          : new PrintStream(new FileOutputStream(new File(filename)),
+                            true, "UTF-8");
     } catch (Exception e) {
       throw new MetricsException("Error creating "+ filename, e);
     }
@@ -25,6 +25,7 @@ import java.io.Closeable;
 import java.net.Socket;

 import org.apache.commons.configuration.SubsetConfiguration;
+import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -64,7 +65,8 @@ public class GraphiteSink implements MetricsSink, Closeable {
     try {
       // Open an connection to Graphite server.
       socket = new Socket(serverHost, serverPort);
-      writer = new OutputStreamWriter(socket.getOutputStream());
+      writer = new OutputStreamWriter(
+          socket.getOutputStream(), Charsets.UTF_8);
     } catch (Exception e) {
       throw new MetricsException("Error creating connection, "
           + serverHost + ":" + serverPort, e);
@@ -29,6 +29,7 @@ import java.util.List;
 import java.util.Map;

 import org.apache.commons.configuration.SubsetConfiguration;
+import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.metrics2.MetricsSink;
@@ -223,7 +224,7 @@ public abstract class AbstractGangliaSink implements MetricsSink {
    * @param s the string to be written to buffer at offset location
    */
   protected void xdr_string(String s) {
-    byte[] bytes = s.getBytes();
+    byte[] bytes = s.getBytes(Charsets.UTF_8);
     int len = bytes.length;
     xdr_int(len);
     System.arraycopy(bytes, 0, buffer, offset, len);
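xdr_string writes the byte length ahead of the payload, so the length must be computed from the exact bytes that go on the wire; pinning the charset makes that count identical on every JVM. A small demonstration (hypothetical class; StandardCharsets in place of Charsets):

```java
import java.nio.charset.StandardCharsets;

public class XdrLength {
  public static void main(String[] args) {
    // XDR prefixes the payload with its byte length. With the platform
    // default charset the length and the payload could disagree across
    // JVMs; with an explicit charset they cannot.
    String s = "métrica";                        // 7 chars
    byte[] utf8 = s.getBytes(StandardCharsets.UTF_8);
    System.out.println(s.length());              // 7
    System.out.println(utf8.length);             // 8 -- 'é' is two bytes
  }
}
```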
@@ -20,13 +20,16 @@ package org.apache.hadoop.net;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.NET_TOPOLOGY_TABLE_MAPPING_FILE_KEY;

 import java.io.BufferedReader;
+import java.io.FileInputStream;
 import java.io.FileReader;
 import java.io.IOException;
+import java.io.InputStreamReader;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;

+import org.apache.commons.io.Charsets;
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -96,9 +99,10 @@ public class TableMapping extends CachedDNSToSwitchMapping {
       return null;
     }

-    BufferedReader reader = null;
-    try {
-      reader = new BufferedReader(new FileReader(filename));
+    try (BufferedReader reader =
+        new BufferedReader(new InputStreamReader(
+            new FileInputStream(filename), Charsets.UTF_8))) {
       String line = reader.readLine();
       while (line != null) {
         line = line.trim();
@@ -115,15 +119,6 @@ public class TableMapping extends CachedDNSToSwitchMapping {
     } catch (Exception e) {
       LOG.warn(filename + " cannot be read.", e);
       return null;
-    } finally {
-      if (reader != null) {
-        try {
-          reader.close();
-        } catch (IOException e) {
-          LOG.warn(filename + " cannot be read.", e);
-          return null;
-        }
-      }
     }
     return loadMap;
   }
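This hunk also folds the manual finally/close dance into try-with-resources, which closes the reader on every exit path with nine fewer lines. A condensed sketch of the replacement idiom (the file name is a placeholder, not the mapping file TableMapping actually reads):

```java
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

public class ReadTopologyTable {
  public static void main(String[] args) throws IOException {
    File file = new File("topology.table");  // placeholder path
    if (!file.exists()) {
      return;  // nothing to read in this sketch
    }
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(
        new FileInputStream(file), StandardCharsets.UTF_8))) {
      for (String line; (line = reader.readLine()) != null; ) {
        System.out.println(line.trim());
      }
    } // reader closed on every path, no finally block needed
  }
}
```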
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.security;

+import com.google.common.base.Charsets;
 import org.apache.hadoop.http.HttpServer2;
 import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
 import org.apache.hadoop.conf.Configuration;
@@ -24,8 +25,10 @@ import org.apache.hadoop.http.FilterContainer;
 import org.apache.hadoop.http.FilterInitializer;
 import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;

+import java.io.FileInputStream;
 import java.io.FileReader;
 import java.io.IOException;
+import java.io.InputStreamReader;
 import java.io.Reader;
 import java.util.HashMap;
 import java.util.Map;
@@ -78,10 +81,10 @@ public class AuthenticationFilterInitializer extends FilterInitializer {
     if (signatureSecretFile == null) {
       throw new RuntimeException("Undefined property: " + SIGNATURE_SECRET_FILE);
     }

-    try {
-      StringBuilder secret = new StringBuilder();
-      Reader reader = new FileReader(signatureSecretFile);
+    StringBuilder secret = new StringBuilder();
+    try (Reader reader = new InputStreamReader(
+        new FileInputStream(signatureSecretFile), Charsets.UTF_8)) {
       int c = reader.read();
       while (c > -1) {
         secret.append((char)c);
@@ -32,6 +32,7 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;

+import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -217,7 +218,8 @@ public class Credentials implements Writable {
     readFields(in);
   }

-  private static final byte[] TOKEN_STORAGE_MAGIC = "HDTS".getBytes();
+  private static final byte[] TOKEN_STORAGE_MAGIC =
+      "HDTS".getBytes(Charsets.UTF_8);
   private static final byte TOKEN_STORAGE_VERSION = 0;

   public void writeTokenStorageToStream(DataOutputStream os)
@@ -17,8 +17,10 @@
  */
 package org.apache.hadoop.security;

+import java.io.FileInputStream;
 import java.io.FileReader;
 import java.io.IOException;
+import java.io.InputStreamReader;
 import java.io.Reader;
 import java.util.ArrayList;
 import java.util.Hashtable;
@@ -34,6 +36,7 @@ import javax.naming.directory.InitialDirContext;
 import javax.naming.directory.SearchControls;
 import javax.naming.directory.SearchResult;

+import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -366,9 +369,10 @@ public class LdapGroupsMapping
       // an anonymous bind
       return "";
     }

-    try (Reader reader = new FileReader(pwFile)) {
-      StringBuilder password = new StringBuilder();
+    StringBuilder password = new StringBuilder();
+    try (Reader reader = new InputStreamReader(
+        new FileInputStream(pwFile), Charsets.UTF_8)) {
       int c = reader.read();
       while (c > -1) {
         password.append((char)c);
@@ -44,6 +44,7 @@ import javax.security.sasl.SaslServer;
 import javax.security.sasl.SaslServerFactory;

 import org.apache.commons.codec.binary.Base64;
+import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -184,11 +185,11 @@ public class SaslRpcServer {
   }

   static String encodeIdentifier(byte[] identifier) {
-    return new String(Base64.encodeBase64(identifier));
+    return new String(Base64.encodeBase64(identifier), Charsets.UTF_8);
   }

   static byte[] decodeIdentifier(String identifier) {
-    return Base64.decodeBase64(identifier.getBytes());
+    return Base64.decodeBase64(identifier.getBytes(Charsets.UTF_8));
   }

   public static <T extends TokenIdentifier> T getIdentifier(String id,
@@ -206,7 +207,8 @@ public class SaslRpcServer {
   }

   static char[] encodePassword(byte[] password) {
-    return new String(Base64.encodeBase64(password)).toCharArray();
+    return new String(Base64.encodeBase64(password),
+                      Charsets.UTF_8).toCharArray();
   }

   /** Splitting fully qualified Kerberos name into parts */
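Base64 output is pure ASCII, so any ASCII-compatible default charset happens to work here; the explicit charset removes the "happens to" for exotic defaults and documents the intent. A round trip with commons-codec (a sketch; the identifier bytes are made up):

```java
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import org.apache.commons.codec.binary.Base64;

public class IdentifierCodec {
  public static void main(String[] args) {
    byte[] identifier = {0x00, 0x7f, (byte) 0xff, 0x42};  // arbitrary bytes
    // Encode: base64 bytes -> String, with the charset pinned.
    String encoded =
        new String(Base64.encodeBase64(identifier), StandardCharsets.UTF_8);
    // Decode: String -> bytes, with the same charset, so the pair is
    // lossless end to end.
    byte[] decoded =
        Base64.decodeBase64(encoded.getBytes(StandardCharsets.UTF_8));
    System.out.println(encoded + " -> " + Arrays.equals(identifier, decoded));
  }
}
```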
@@ -22,11 +22,13 @@ import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStreamReader;
+import java.nio.charset.Charset;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;

+import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -217,7 +219,9 @@ public class ShellBasedIdMapping implements IdMappingServiceProvider {
     try {
       Process process = Runtime.getRuntime().exec(
           new String[] { "bash", "-c", command });
-      br = new BufferedReader(new InputStreamReader(process.getInputStream()));
+      br = new BufferedReader(
+          new InputStreamReader(process.getInputStream(),
+                                Charset.defaultCharset()));
       String line = null;
       while ((line = br.readLine()) != null) {
         String[] nameId = line.split(regex);
@@ -552,7 +556,7 @@ public class ShellBasedIdMapping implements IdMappingServiceProvider {
     Map<Integer, Integer> gidMapping = new HashMap<Integer, Integer>();

     BufferedReader in = new BufferedReader(new InputStreamReader(
-        new FileInputStream(staticMapFile)));
+        new FileInputStream(staticMapFile), Charsets.UTF_8));

     try {
       String line = null;
@@ -18,6 +18,7 @@

 package org.apache.hadoop.security.alias;

+import org.apache.commons.io.Charsets;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
@@ -165,7 +166,7 @@ public class JavaKeyStoreProvider extends CredentialProvider {
   }

   public static char[] bytesToChars(byte[] bytes) {
-    String pass = new String(bytes);
+    String pass = new String(bytes, Charsets.UTF_8);
     return pass.toCharArray();
   }

@@ -23,6 +23,7 @@ import java.net.URI;
 import java.util.ArrayList;
 import java.util.List;

+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
@@ -56,7 +57,8 @@ public class UserProvider extends CredentialProvider {
     if (bytes == null) {
       return null;
     }
-    return new CredentialEntry(alias, new String(bytes).toCharArray());
+    return new CredentialEntry(
+        alias, new String(bytes, Charsets.UTF_8).toCharArray());
   }

   @Override
@@ -31,6 +31,7 @@ import java.util.Map;
 import java.util.TreeMap;
 import java.util.UUID;

+import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -100,7 +101,8 @@ public class SpanReceiverHost implements TraceAdminProtocol {
       // out of /proc/self/stat. (There isn't any portable way to get the
       // process ID from Java.)
       reader = new BufferedReader(
-          new InputStreamReader(new FileInputStream("/proc/self/stat")));
+          new InputStreamReader(new FileInputStream("/proc/self/stat"),
+                                Charsets.UTF_8));
       String line = reader.readLine();
       if (line == null) {
         throw new EOFException();
@@ -25,6 +25,7 @@ import java.util.Arrays;
 import java.util.LinkedList;
 import java.util.List;

+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
@@ -91,7 +92,7 @@ public class TraceAdmin extends Configured implements Tool {
       return 1;
     }
     ByteArrayOutputStream configStream = new ByteArrayOutputStream();
-    PrintStream configsOut = new PrintStream(configStream);
+    PrintStream configsOut = new PrintStream(configStream, false, "UTF-8");
     SpanReceiverInfoBuilder factory = new SpanReceiverInfoBuilder(className);
     String prefix = "";
     for (int i = 0; i < args.size(); ++i) {
@@ -113,13 +114,15 @@ public class TraceAdmin extends Configured implements Tool {
       configsOut.print(prefix + key + " = " + value);
       prefix = ", ";
     }

+    String configStreamStr = configStream.toString("UTF-8");
     try {
       long id = remote.addSpanReceiver(factory.build());
       System.out.println("Added trace span receiver " + id +
-          " with configuration " + configStream.toString());
+          " with configuration " + configStreamStr);
     } catch (IOException e) {
       System.out.println("addSpanReceiver error with configuration " +
-          configStream.toString());
+          configStreamStr);
       throw e;
     }
     return 0;
@@ -19,13 +19,18 @@ package org.apache.hadoop.util;

 import java.io.BufferedReader;
 import java.io.File;
+import java.io.FileInputStream;
 import java.io.FileReader;
 import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.Reader;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashSet;
 import java.util.List;

+import org.apache.commons.io.Charsets;
+import org.apache.commons.io.IOUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;

@@ -78,7 +83,8 @@ public class FileBasedIPList implements IPList {
     if (fileName != null) {
       File file = new File (fileName);
       if (file.exists()) {
-        FileReader fileReader = new FileReader(file);
+        Reader fileReader = new InputStreamReader(
+            new FileInputStream(file), Charsets.UTF_8);
         BufferedReader bufferedReader = new BufferedReader(fileReader);
         List<String> lines = new ArrayList<String>();
         String line = null;
@@ -22,6 +22,7 @@ import java.io.*;
 import java.util.Set;
 import java.util.HashSet;

+import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.LogFactory;
 import org.apache.commons.logging.Log;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -72,7 +73,8 @@ public class HostsFileReader {
       throws IOException {
     BufferedReader reader = null;
     try {
-      reader = new BufferedReader(new InputStreamReader(fileInputStream));
+      reader = new BufferedReader(
+          new InputStreamReader(fileInputStream, Charsets.UTF_8));
       String line;
       while ((line = reader.readLine()) != null) {
         String[] nodes = line.split("[ \t\n\f\r]+");
@@ -20,13 +20,16 @@ package org.apache.hadoop.util;

 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
+import java.io.PrintStream;
 import java.io.PrintWriter;
+import java.io.UnsupportedEncodingException;
 import java.lang.management.ManagementFactory;
 import java.lang.management.ThreadInfo;
 import java.lang.management.ThreadMXBean;
 import java.lang.reflect.Constructor;
 import java.lang.reflect.Field;
 import java.lang.reflect.Method;
+import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
@@ -154,7 +157,7 @@ public class ReflectionUtils {
    * @param stream the stream to
    * @param title a string title for the stack trace
    */
-  public synchronized static void printThreadInfo(PrintWriter stream,
+  public synchronized static void printThreadInfo(PrintStream stream,
                                                   String title) {
     final int STACK_DEPTH = 20;
     boolean contention = threadBean.isThreadContentionMonitoringEnabled();
@@ -215,9 +218,12 @@ public class ReflectionUtils {
       }
     }
     if (dumpStack) {
-      ByteArrayOutputStream buffer = new ByteArrayOutputStream();
-      printThreadInfo(new PrintWriter(buffer), title);
-      log.info(buffer.toString());
+      try {
+        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
+        printThreadInfo(new PrintStream(buffer, false, "UTF-8"), title);
+        log.info(buffer.toString(Charset.defaultCharset().name()));
+      } catch (UnsupportedEncodingException ignored) {
+      }
     }
   }
 }
@@ -22,6 +22,7 @@ import java.io.File;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.io.InputStream;
+import java.nio.charset.Charset;
 import java.util.Arrays;
 import java.util.Map;
 import java.util.Timer;
@@ -493,11 +494,11 @@ abstract public class Shell {
       timeOutTimer.schedule(timeoutTimerTask, timeOutInterval);
     }
     final BufferedReader errReader =
-        new BufferedReader(new InputStreamReader(process
-            .getErrorStream()));
+        new BufferedReader(new InputStreamReader(
+            process.getErrorStream(), Charset.defaultCharset()));
     BufferedReader inReader =
-        new BufferedReader(new InputStreamReader(process
-            .getInputStream()));
+        new BufferedReader(new InputStreamReader(
+            process.getInputStream(), Charset.defaultCharset()));
     final StringBuffer errMsg = new StringBuffer();

     // read error and input streams as this would free up the buffers
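Note the deliberate exception in Shell (and in ShellBasedIdMapping above): a child process writes its stdout and stderr in the platform locale's encoding, so Charset.defaultCharset() is the correct explicit choice there, not UTF-8. A sketch of that case (the echo command is invented for illustration and assumes a Unix-like system):

```java
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.Charset;

public class SubprocessOutput {
  public static void main(String[] args) throws IOException {
    // The child inherits our locale, so its output is in the default
    // charset -- decode with that same charset instead of hard-coded UTF-8.
    Process p = new ProcessBuilder("echo", "hello").start();
    try (BufferedReader r = new BufferedReader(new InputStreamReader(
        p.getInputStream(), Charset.defaultCharset()))) {
      for (String line; (line = r.readLine()) != null; ) {
        System.out.println(line);
      }
    }
  }
}
```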
@@ -245,9 +245,7 @@ public class TestDataTransferKeepalive {
   private void assertXceiverCount(int expected) {
     int count = getXceiverCountWithoutServer();
     if (count != expected) {
-      ReflectionUtils.printThreadInfo(
-          new PrintWriter(System.err),
-          "Thread dumps");
+      ReflectionUtils.printThreadInfo(System.err, "Thread dumps");
       fail("Expected " + expected + " xceivers, found " +
           count);
     }