HDFS-4032. Specify the charset explicitly rather than rely on the default. Contributed by Eli Collins

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1431179 13f79535-47bb-0310-9956-ffa450edef68
Eli Collins 2013-01-10 02:30:05 +00:00
parent 59d9d8bca9
commit 6449f52455
19 changed files with 98 additions and 52 deletions
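Every file in this patch applies the same two-part pattern: byte/string conversions that previously passed a charset name (with its checked UnsupportedEncodingException) or relied on the platform default now use Guava's Charsets.UTF_8, and default-charset readers/writers are replaced with stream wrappers that take an explicit charset. A minimal before/after sketch of the string/byte half of the pattern (illustrative only; the class and method names below are made up, not taken from the patch):

import java.io.UnsupportedEncodingException;
import com.google.common.base.Charsets;

public class CharsetPatternSketch {
  // Before: charset passed by name, forcing a catch block for an
  // exception that cannot reasonably occur for UTF-8.
  static byte[] toBytesOld(String s) {
    try {
      return s.getBytes("UTF8");
    } catch (UnsupportedEncodingException e) {
      throw new AssertionError("UTF8 encoding is not supported");
    }
  }

  // After: the Charset overload used throughout this commit -- no checked
  // exception and no dependence on the JVM default charset.
  static byte[] toBytesNew(String s) {
    return s.getBytes(Charsets.UTF_8);
  }

  public static void main(String[] args) {
    System.out.println(toBytesOld("hdfs").length == toBytesNew("hdfs").length);
  }
}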

View File

@@ -467,6 +467,9 @@ Release 2.0.3-alpha - Unreleased
HDFS-4035. LightWeightGSet and LightWeightHashSet increment a
volatile without synchronization. (eli)
HDFS-4032. Specify the charset explicitly rather than rely on the
default. (eli)
HDFS-4363. Combine PBHelper and HdfsProtoUtil and remove redundant
methods. (suresh)

View File

@@ -80,6 +80,7 @@ import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.ToolRunner;
import com.google.common.base.Charsets;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
@@ -222,12 +223,7 @@ public class DFSUtil {
* Converts a string to a byte array using UTF8 encoding.
*/
public static byte[] string2Bytes(String str) {
try {
return str.getBytes("UTF8");
} catch(UnsupportedEncodingException e) {
assert false : "UTF8 encoding is not supported ";
}
return null;
return str.getBytes(Charsets.UTF_8);
}
/**
@@ -239,19 +235,14 @@ public class DFSUtil {
if (pathComponents.length == 1 && pathComponents[0].length == 0) {
return Path.SEPARATOR;
}
try {
StringBuilder result = new StringBuilder();
for (int i = 0; i < pathComponents.length; i++) {
result.append(new String(pathComponents[i], "UTF-8"));
result.append(new String(pathComponents[i], Charsets.UTF_8));
if (i < pathComponents.length - 1) {
result.append(Path.SEPARATOR_CHAR);
}
}
return result.toString();
} catch (UnsupportedEncodingException ex) {
assert false : "UTF8 encoding is not supported ";
}
return null;
}
/** Convert an object representing a path to a string. */

View File

@@ -51,6 +51,7 @@ import org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey;
import org.apache.hadoop.security.SaslInputStream;
import org.apache.hadoop.security.SaslOutputStream;
import com.google.common.base.Charsets;
import com.google.common.collect.Maps;
import com.google.protobuf.ByteString;
@@ -399,7 +400,7 @@ public class DataTransferEncryptor {
DataEncryptionKey encryptionKey) {
return encryptionKey.keyId + NAME_DELIMITER +
encryptionKey.blockPoolId + NAME_DELIMITER +
new String(Base64.encodeBase64(encryptionKey.nonce, false));
new String(Base64.encodeBase64(encryptionKey.nonce, false), Charsets.UTF_8);
}
/**
@@ -427,7 +428,7 @@ }
}
private static char[] encryptionKeyToPassword(byte[] encryptionKey) {
return new String(Base64.encodeBase64(encryptionKey, false)).toCharArray();
return new String(Base64.encodeBase64(encryptionKey, false), Charsets.UTF_8).toCharArray();
}
/**

View File

@@ -58,6 +58,7 @@ import org.apache.hadoop.ipc.Server;
import org.apache.hadoop.security.SecurityUtil;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Charsets;
import com.google.common.base.Preconditions;
import com.google.common.base.Stopwatch;
import com.google.common.collect.ImmutableList;
@@ -934,7 +935,7 @@ class Journal implements Closeable {
fos.write('\n');
// Write human-readable data after the protobuf. This is only
// to assist in debugging -- it's not parsed at all.
OutputStreamWriter writer = new OutputStreamWriter(fos);
OutputStreamWriter writer = new OutputStreamWriter(fos, Charsets.UTF_8);
writer.write(String.valueOf(newData));
writer.write('\n');

View File

@@ -70,6 +70,8 @@ import org.apache.hadoop.security.authorize.ProxyUsers;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.VersionInfo;
import com.google.common.base.Charsets;
import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_HTTP_STATIC_USER;
import static org.apache.hadoop.fs.CommonConfigurationKeys.DEFAULT_HADOOP_HTTP_STATIC_USER;
@@ -229,7 +231,7 @@ public class JspHelper {
}
blockReader = null;
s.close();
out.print(HtmlQuoting.quoteHtmlChars(new String(buf)));
out.print(HtmlQuoting.quoteHtmlChars(new String(buf, Charsets.UTF_8)));
}
public static void addTableHeader(JspWriter out) throws IOException {

View File

@@ -44,6 +44,8 @@ import org.apache.hadoop.util.VersionInfo;
import com.google.common.base.Preconditions;
import com.google.common.base.Charsets;
/**
@@ -658,7 +660,7 @@ public abstract class Storage extends StorageInfo {
FileLock res = null;
try {
res = file.getChannel().tryLock();
file.write(jvmName.getBytes());
file.write(jvmName.getBytes(Charsets.UTF_8));
LOG.info("Lock on " + lockF + " acquired by nodename " + jvmName);
} catch(OverlappingFileLockException oe) {
LOG.error("It appears that another namenode " + file.readLine()

View File

@@ -19,16 +19,20 @@ package org.apache.hadoop.hdfs.server.datanode.fsdataset.impl;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.PrintStream;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.hadoop.hdfs.server.datanode.DataBlockScanner;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.RollingLogs;
import com.google.common.base.Charsets;
class RollingLogsImpl implements RollingLogs {
private static final String CURR_SUFFIX = ".curr";
private static final String PREV_SUFFIX = ".prev";
@@ -40,7 +44,7 @@ class RollingLogsImpl implements RollingLogs {
private final File curr;
private final File prev;
private PrintStream out; //require synchronized access
private PrintWriter out; //require synchronized access
private Appender appender = new Appender() {
@Override
@@ -82,7 +86,8 @@ class RollingLogsImpl implements RollingLogs {
RollingLogsImpl(String dir, String filePrefix) throws FileNotFoundException{
curr = new File(dir, filePrefix + CURR_SUFFIX);
prev = new File(dir, filePrefix + PREV_SUFFIX);
out = new PrintStream(new FileOutputStream(curr, true));
out = new PrintWriter(new OutputStreamWriter(new FileOutputStream(
curr, true), Charsets.UTF_8));
}
@Override
@@ -108,7 +113,8 @@ class RollingLogsImpl implements RollingLogs {
synchronized(this) {
appender.close();
final boolean renamed = curr.renameTo(prev);
out = new PrintStream(new FileOutputStream(curr, true));
out = new PrintWriter(new OutputStreamWriter(new FileOutputStream(
curr, true), Charsets.UTF_8));
if (!renamed) {
throw new IOException("Failed to rename " + curr + " to " + prev);
}
@@ -163,7 +169,8 @@ class RollingLogsImpl implements RollingLogs {
reader = null;
}
reader = new BufferedReader(new FileReader(file));
reader = new BufferedReader(new InputStreamReader(new FileInputStream(
file), Charsets.UTF_8));
return true;
}
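The reader and writer changes above show the other half of the pattern: FileReader and FileWriter at the time offered no way to specify a charset, and a PrintStream built from a bare OutputStream falls back to the platform default as well, so the patch wraps the raw byte streams in InputStreamReader, OutputStreamWriter, and PrintWriter with an explicit charset. A rough standalone sketch of the two idioms (the file name is hypothetical):

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import com.google.common.base.Charsets;

public class ReaderWriterSketch {
  public static void main(String[] args) throws IOException {
    File f = new File("rolling.log"); // hypothetical file, for illustration only

    // Old style (implicit platform default charset):
    //   out = new PrintStream(new FileOutputStream(f, true));
    //   in  = new BufferedReader(new FileReader(f));

    // New style, matching the replacement used above -- the charset is explicit:
    PrintWriter out = new PrintWriter(new OutputStreamWriter(
        new FileOutputStream(f, true), Charsets.UTF_8));
    out.println("block scan entry");
    out.close();

    BufferedReader in = new BufferedReader(new InputStreamReader(
        new FileInputStream(f), Charsets.UTF_8));
    System.out.println(in.readLine());
    in.close();
  }
}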

View File

@@ -48,6 +48,8 @@ import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.type.TypeReference;
import org.znerd.xmlenc.XMLOutputter;
import com.google.common.base.Charsets;
/**
* This class generates the data that is needed to be displayed on cluster web
* console.
@@ -873,7 +875,7 @@ class ClusterJspHelper {
URLConnection connection = url.openConnection();
BufferedReader in = new BufferedReader(
new InputStreamReader(
connection.getInputStream()));
connection.getInputStream(), Charsets.UTF_8));
String inputLine;
while ((inputLine = in.readLine()) != null) {
out.append(inputLine);

View File

@@ -78,8 +78,10 @@ import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.lang.management.ManagementFactory;
@@ -204,6 +206,7 @@ import org.apache.hadoop.util.VersionInfo;
import org.mortbay.util.ajax.JSON;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Charsets;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
@@ -1089,8 +1092,8 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
try {
checkSuperuserPrivilege();
File file = new File(System.getProperty("hadoop.log.dir"), filename);
PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter(file,
true)));
PrintWriter out = new PrintWriter(new BufferedWriter(
new OutputStreamWriter(new FileOutputStream(file, true), Charsets.UTF_8)));
metaSave(out);
out.flush();
out.close();

View File

@@ -17,7 +17,8 @@
package org.apache.hadoop.hdfs.server.namenode;
import java.io.IOException;
import java.io.PrintStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.security.PrivilegedExceptionAction;
import javax.servlet.ServletContext;
@@ -32,6 +33,8 @@ import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifie
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import com.google.common.base.Charsets;
/**
* Renew delegation tokens over http for use in hftp.
*/
@@ -73,7 +76,8 @@ public class RenewDelegationTokenServlet extends DfsServlet {
return nn.getRpcServer().renewDelegationToken(token);
}
});
PrintStream os = new PrintStream(resp.getOutputStream());
final PrintWriter os = new PrintWriter(new OutputStreamWriter(
resp.getOutputStream(), Charsets.UTF_8));
os.println(result);
os.close();
} catch(Exception e) {

View File

@@ -20,7 +20,8 @@ package org.apache.hadoop.hdfs.server.namenode.web.resources;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.net.InetAddress;
import java.net.URI;
import java.net.URISyntaxException;
@@ -102,6 +103,7 @@ import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import com.google.common.base.Charsets;
import com.sun.jersey.spi.container.ResourceFilters;
/** Web-hdfs NameNode implementation. */
@@ -713,7 +715,8 @@ public class NamenodeWebHdfsMethods {
return new StreamingOutput() {
@Override
public void write(final OutputStream outstream) throws IOException {
final PrintStream out = new PrintStream(outstream);
final PrintWriter out = new PrintWriter(new OutputStreamWriter(
outstream, Charsets.UTF_8));
out.println("{\"" + FileStatus.class.getSimpleName() + "es\":{\""
+ FileStatus.class.getSimpleName() + "\":[");
@@ -736,6 +739,7 @@ public class NamenodeWebHdfsMethods {
out.println();
out.println("]}}");
out.flush();
}
};
}

View File

@@ -55,6 +55,8 @@ import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.GenericOptionsParser;
import com.google.common.base.Charsets;
/**
* Fetch a DelegationToken from the current Namenode and store it in the
* specified file.
@@ -269,8 +271,8 @@ public class DelegationTokenFetcher {
throw new IOException("Error renewing token: " +
connection.getResponseMessage());
}
in = new BufferedReader(new InputStreamReader
(connection.getInputStream()));
in = new BufferedReader(
new InputStreamReader(connection.getInputStream(), Charsets.UTF_8));
long result = Long.parseLong(in.readLine());
in.close();
return result;

View File

@@ -18,9 +18,10 @@
package org.apache.hadoop.hdfs.tools.offlineEditsViewer;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.Stack;
import org.apache.hadoop.classification.InterfaceAudience;
@@ -39,6 +40,8 @@ import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.helpers.XMLReaderFactory;
import com.google.common.base.Charsets;
/**
* OfflineEditsXmlLoader walks an EditsVisitor over an OEV XML file
*/
@@ -48,7 +51,7 @@ class OfflineEditsXmlLoader
extends DefaultHandler implements OfflineEditsLoader {
private final boolean fixTxIds;
private final OfflineEditsVisitor visitor;
private final FileReader fileReader;
private final InputStreamReader fileReader;
private ParseState state;
private Stanza stanza;
private Stack<Stanza> stanzaStack;
@@ -70,7 +73,8 @@ class OfflineEditsXmlLoader
public OfflineEditsXmlLoader(OfflineEditsVisitor visitor,
File inputFile, OfflineEditsViewer.Flags flags) throws FileNotFoundException {
this.visitor = visitor;
this.fileReader = new FileReader(inputFile);
this.fileReader =
new InputStreamReader(new FileInputStream(inputFile), Charsets.UTF_8);
this.fixTxIds = flags.getFixTxIds();
}

View File

@@ -19,7 +19,8 @@ package org.apache.hadoop.hdfs.tools.offlineEditsViewer;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.util.Map;
import java.util.HashMap;
@@ -29,6 +30,8 @@ import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes;
import com.google.common.base.Charsets;
/**
* StatisticsEditsVisitor implements text version of EditsVisitor
* that aggregates counts of op codes processed
@@ -37,7 +40,7 @@ import org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes;
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class StatisticsEditsVisitor implements OfflineEditsVisitor {
final private PrintStream out;
final private PrintWriter out;
private int version = -1;
private final Map<FSEditLogOpCodes, Long> opCodeCount =
@@ -52,7 +55,7 @@ public class StatisticsEditsVisitor implements OfflineEditsVisitor {
* @param printToScreen Mirror output to screen?
*/
public StatisticsEditsVisitor(OutputStream out) throws IOException {
this.out = new PrintStream(out);
this.out = new PrintWriter(new OutputStreamWriter(out, Charsets.UTF_8));
}
/** Start the visitor */

View File

@@ -17,8 +17,12 @@
*/
package org.apache.hadoop.hdfs.tools.offlineImageViewer;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import com.google.common.base.Charsets;
/**
* TextWriterImageProcessor mixes in the ability for ImageVisitor
@@ -34,7 +38,7 @@ import java.io.IOException;
abstract class TextWriterImageVisitor extends ImageVisitor {
private boolean printToScreen = false;
private boolean okToWrite = false;
final private FileWriter fw;
final private OutputStreamWriter fw;
/**
* Create a processor that writes to the file named.
@@ -56,7 +60,7 @@ abstract class TextWriterImageVisitor extends ImageVisitor {
throws IOException {
super();
this.printToScreen = printToScreen;
fw = new FileWriter(filename);
fw = new OutputStreamWriter(new FileOutputStream(filename), Charsets.UTF_8);
okToWrite = true;
}

View File

@@ -20,9 +20,9 @@ package org.apache.hadoop.hdfs.util;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.util.regex.Matcher;
@@ -34,6 +34,8 @@ import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.MD5Hash;
import org.apache.hadoop.util.StringUtils;
import com.google.common.base.Charsets;
/**
* Static functions for dealing with files of the same format
* that the Unix "md5sum" utility writes.
@@ -78,7 +80,8 @@ }
}
BufferedReader reader =
new BufferedReader(new FileReader(md5File));
new BufferedReader(new InputStreamReader(new FileInputStream(
md5File), Charsets.UTF_8));
try {
md5Line = reader.readLine();
if (md5Line == null) { md5Line = ""; }
@@ -138,7 +141,7 @@ public abstract class MD5FileUtils {
String md5Line = digestString + " *" + dataFile.getName() + "\n";
AtomicFileOutputStream afos = new AtomicFileOutputStream(md5File);
afos.write(md5Line.getBytes());
afos.write(md5Line.getBytes(Charsets.UTF_8));
afos.close();
LOG.debug("Saved MD5 " + digest + " to " + md5File);
}

View File

@@ -19,14 +19,18 @@ package org.apache.hadoop.hdfs.util;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.IOUtils;
import com.google.common.base.Charsets;
/**
* Class that represents a file on disk which persistently stores
* a single <code>long</code> value. The file is updated atomically
@@ -74,7 +78,7 @@ public class PersistentLongFile {
public static void writeFile(File file, long val) throws IOException {
AtomicFileOutputStream fos = new AtomicFileOutputStream(file);
try {
fos.write(String.valueOf(val).getBytes());
fos.write(String.valueOf(val).getBytes(Charsets.UTF_8));
fos.write('\n');
fos.close();
fos = null;
@@ -88,7 +92,9 @@ public class PersistentLongFile {
public static long readFile(File file, long defaultVal) throws IOException {
long val = defaultVal;
if (file.exists()) {
BufferedReader br = new BufferedReader(new FileReader(file));
BufferedReader br =
new BufferedReader(new InputStreamReader(new FileInputStream(
file), Charsets.UTF_8));
try {
val = Long.valueOf(br.readLine());
br.close();

View File

@@ -105,6 +105,8 @@ import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSelect
import org.apache.hadoop.util.Progressable;
import org.mortbay.util.ajax.JSON;
import com.google.common.base.Charsets;
/** A FileSystem for HDFS over the web. */
public class WebHdfsFileSystem extends FileSystem
implements DelegationTokenRenewer.Renewable {
@@ -281,7 +283,7 @@ public class WebHdfsFileSystem extends FileSystem
+ "\" (parsed=\"" + parsed + "\")");
}
}
return (Map<?, ?>)JSON.parse(new InputStreamReader(in));
return (Map<?, ?>)JSON.parse(new InputStreamReader(in, Charsets.UTF_8));
}
private static Map<?, ?> validateResponse(final HttpOpParam.Op op,

View File

@@ -25,6 +25,8 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSUtil;
import org.junit.Test;
import com.google.common.base.Charsets;
/**
*
@@ -45,7 +47,7 @@ public class TestPathComponents {
String pathString = str;
byte[][] oldPathComponents = INode.getPathComponents(pathString);
byte[][] newPathComponents =
DFSUtil.bytes2byteArray(pathString.getBytes("UTF-8"),
DFSUtil.bytes2byteArray(pathString.getBytes(Charsets.UTF_8),
(byte) Path.SEPARATOR_CHAR);
if (oldPathComponents[0] == null) {
assertTrue(oldPathComponents[0] == newPathComponents[0]);