diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 8e381dd686a..2aa917caeff 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -467,6 +467,9 @@ Release 2.0.3-alpha - Unreleased
     HDFS-4035. LightWeightGSet and LightWeightHashSet increment a
     volatile without synchronization. (eli)
 
+    HDFS-4032. Specify the charset explicitly rather than rely on the
+    default. (eli)
+
     HDFS-4363. Combine PBHelper and HdfsProtoUtil and remove redundant
     methods. (suresh)
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
index e94d4a8b843..69ef0095db6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
@@ -80,6 +80,7 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.ToolRunner;
 
+import com.google.common.base.Charsets;
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
@@ -222,12 +223,7 @@ public class DFSUtil {
    * Converts a string to a byte array using UTF8 encoding.
    */
   public static byte[] string2Bytes(String str) {
-    try {
-      return str.getBytes("UTF8");
-    } catch(UnsupportedEncodingException e) {
-      assert false : "UTF8 encoding is not supported ";
-    }
-    return null;
+    return str.getBytes(Charsets.UTF_8);
   }
 
   /**
@@ -239,19 +235,14 @@ public class DFSUtil {
     if (pathComponents.length == 1 && pathComponents[0].length == 0) {
       return Path.SEPARATOR;
     }
-    try {
-      StringBuilder result = new StringBuilder();
-      for (int i = 0; i < pathComponents.length; i++) {
-        result.append(new String(pathComponents[i], "UTF-8"));
-        if (i < pathComponents.length - 1) {
-          result.append(Path.SEPARATOR_CHAR);
-        }
+    StringBuilder result = new StringBuilder();
+    for (int i = 0; i < pathComponents.length; i++) {
+      result.append(new String(pathComponents[i], Charsets.UTF_8));
+      if (i < pathComponents.length - 1) {
+        result.append(Path.SEPARATOR_CHAR);
       }
-      return result.toString();
-    } catch (UnsupportedEncodingException ex) {
-      assert false : "UTF8 encoding is not supported ";
     }
-    return null;
+    return result.toString();
   }
 
   /** Convert an object representing a path to a string. */
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/DataTransferEncryptor.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/DataTransferEncryptor.java
index 229480b927b..f84bdf38ead 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/DataTransferEncryptor.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/DataTransferEncryptor.java
@@ -51,6 +51,7 @@ import org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey;
 import org.apache.hadoop.security.SaslInputStream;
 import org.apache.hadoop.security.SaslOutputStream;
 
+import com.google.common.base.Charsets;
 import com.google.common.collect.Maps;
 import com.google.protobuf.ByteString;
 
@@ -399,7 +400,7 @@ public class DataTransferEncryptor {
       DataEncryptionKey encryptionKey) {
     return encryptionKey.keyId + NAME_DELIMITER +
         encryptionKey.blockPoolId + NAME_DELIMITER +
-        new String(Base64.encodeBase64(encryptionKey.nonce, false));
+        new String(Base64.encodeBase64(encryptionKey.nonce, false), Charsets.UTF_8);
   }
 
   /**
@@ -427,7 +428,7 @@ public class DataTransferEncryptor {
   }
 
   private static char[] encryptionKeyToPassword(byte[] encryptionKey) {
-    return new String(Base64.encodeBase64(encryptionKey, false)).toCharArray();
+    return new String(Base64.encodeBase64(encryptionKey, false), Charsets.UTF_8).toCharArray();
   }
 
   /**
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/Journal.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/Journal.java
index 9a15fc1b59d..38a58e89bcc 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/Journal.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/Journal.java
@@ -58,6 +58,7 @@ import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.security.SecurityUtil;
 
 import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Charsets;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Stopwatch;
 import com.google.common.collect.ImmutableList;
@@ -934,7 +935,7 @@ class Journal implements Closeable {
       fos.write('\n');
       // Write human-readable data after the protobuf. This is only
       // to assist in debugging -- it's not parsed at all.
-      OutputStreamWriter writer = new OutputStreamWriter(fos);
+      OutputStreamWriter writer = new OutputStreamWriter(fos, Charsets.UTF_8);
       writer.write(String.valueOf(newData));
       writer.write('\n');
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
index 1d9a8f0a1c1..b569ca686e6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
@@ -70,6 +70,8 @@ import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.VersionInfo;
 
+import com.google.common.base.Charsets;
+
 import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_HTTP_STATIC_USER;
 import static org.apache.hadoop.fs.CommonConfigurationKeys.DEFAULT_HADOOP_HTTP_STATIC_USER;
 
@@ -229,7 +231,7 @@ public class JspHelper {
     }
     blockReader = null;
     s.close();
-    out.print(HtmlQuoting.quoteHtmlChars(new String(buf)));
+    out.print(HtmlQuoting.quoteHtmlChars(new String(buf, Charsets.UTF_8)));
   }
 
   public static void addTableHeader(JspWriter out) throws IOException {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/Storage.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/Storage.java
index cef7d237c24..fc69978fb22 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/Storage.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/Storage.java
@@ -44,6 +44,8 @@ import org.apache.hadoop.util.VersionInfo;
 
 import com.google.common.base.Preconditions;
 
+import com.google.common.base.Charsets;
+
 
 
 /**
@@ -658,7 +660,7 @@ public abstract class Storage extends StorageInfo {
       FileLock res = null;
       try {
         res = file.getChannel().tryLock();
-        file.write(jvmName.getBytes());
+        file.write(jvmName.getBytes(Charsets.UTF_8));
         LOG.info("Lock on " + lockF + " acquired by nodename " + jvmName);
       } catch(OverlappingFileLockException oe) {
         LOG.error("It appears that another namenode " + file.readLine()
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/RollingLogsImpl.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/RollingLogsImpl.java
index 48a9829d626..94898ee38d5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/RollingLogsImpl.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/RollingLogsImpl.java
@@ -19,16 +19,20 @@ package org.apache.hadoop.hdfs.server.datanode.fsdataset.impl;
 
 import java.io.BufferedReader;
 import java.io.File;
+import java.io.FileInputStream;
 import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
-import java.io.FileReader;
 import java.io.IOException;
-import java.io.PrintStream;
+import java.io.InputStreamReader;
+import java.io.OutputStreamWriter;
+import java.io.PrintWriter;
 import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.hadoop.hdfs.server.datanode.DataBlockScanner;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.RollingLogs;
 
+import com.google.common.base.Charsets;
+
 class RollingLogsImpl implements RollingLogs {
   private static final String CURR_SUFFIX = ".curr";
   private static final String PREV_SUFFIX = ".prev";
 
@@ -40,7 +44,7 @@ class RollingLogsImpl implements RollingLogs {
 
   private final File curr;
   private final File prev;
-  private PrintStream out; //require synchronized access
+  private PrintWriter out; //require synchronized access
 
   private Appender appender = new Appender() {
     @Override
@@ -82,7 +86,8 @@ class RollingLogsImpl implements RollingLogs {
   RollingLogsImpl(String dir, String filePrefix) throws FileNotFoundException{
     curr = new File(dir, filePrefix + CURR_SUFFIX);
     prev = new File(dir, filePrefix + PREV_SUFFIX);
-    out = new PrintStream(new FileOutputStream(curr, true));
+    out = new PrintWriter(new OutputStreamWriter(new FileOutputStream(
+        curr, true), Charsets.UTF_8));
   }
 
   @Override
@@ -108,7 +113,8 @@ class RollingLogsImpl implements RollingLogs {
     synchronized(this) {
       appender.close();
       final boolean renamed = curr.renameTo(prev);
-      out = new PrintStream(new FileOutputStream(curr, true));
+      out = new PrintWriter(new OutputStreamWriter(new FileOutputStream(
+          curr, true), Charsets.UTF_8));
       if (!renamed) {
         throw new IOException("Failed to rename " + curr + " to " + prev);
       }
@@ -163,7 +169,8 @@ class RollingLogsImpl implements RollingLogs {
         reader = null;
       }
 
-      reader = new BufferedReader(new FileReader(file));
+      reader = new BufferedReader(new InputStreamReader(new FileInputStream(
+          file), Charsets.UTF_8));
       return true;
     }
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ClusterJspHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ClusterJspHelper.java
index 78fa3d69ad9..1b3db818d15 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ClusterJspHelper.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/ClusterJspHelper.java
@@ -48,6 +48,8 @@ import org.codehaus.jackson.map.ObjectMapper;
 import org.codehaus.jackson.type.TypeReference;
 import org.znerd.xmlenc.XMLOutputter;
 
+import com.google.common.base.Charsets;
+
 /**
  * This class generates the data that is needed to be displayed on cluster web
  * console.
@@ -873,7 +875,7 @@ class ClusterJspHelper {
       URLConnection connection = url.openConnection();
       BufferedReader in = new BufferedReader(
                               new InputStreamReader(
-                              connection.getInputStream()));
+                              connection.getInputStream(), Charsets.UTF_8));
       String inputLine;
       while ((inputLine = in.readLine()) != null) {
         out.append(inputLine);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
index 36e4770e6df..258ef9195a0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
@@ -78,8 +78,10 @@ import java.io.DataInputStream;
 import java.io.DataOutputStream;
 import java.io.File;
 import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
 import java.io.FileWriter;
 import java.io.IOException;
+import java.io.OutputStreamWriter;
 import java.io.PrintWriter;
 import java.io.StringWriter;
 import java.lang.management.ManagementFactory;
@@ -204,6 +206,7 @@ import org.apache.hadoop.util.VersionInfo;
 import org.mortbay.util.ajax.JSON;
 
 import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Charsets;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 
@@ -1089,8 +1092,8 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
     try {
       checkSuperuserPrivilege();
       File file = new File(System.getProperty("hadoop.log.dir"), filename);
-      PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter(file,
-          true)));
+      PrintWriter out = new PrintWriter(new BufferedWriter(
+          new OutputStreamWriter(new FileOutputStream(file, true), Charsets.UTF_8)));
       metaSave(out);
       out.flush();
       out.close();
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/RenewDelegationTokenServlet.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/RenewDelegationTokenServlet.java
index cefd63e66c8..caec7659c8c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/RenewDelegationTokenServlet.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/RenewDelegationTokenServlet.java
@@ -17,7 +17,8 @@ package org.apache.hadoop.hdfs.server.namenode;
 
 import java.io.IOException;
-import java.io.PrintStream;
+import java.io.OutputStreamWriter;
+import java.io.PrintWriter;
 import java.security.PrivilegedExceptionAction;
 
 import javax.servlet.ServletContext;
@@ -32,6 +33,8 @@ import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifie
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 
+import com.google.common.base.Charsets;
+
 /**
  * Renew delegation tokens over http for use in hftp.
  */
@@ -73,7 +76,8 @@ public class RenewDelegationTokenServlet extends DfsServlet {
           return nn.getRpcServer().renewDelegationToken(token);
         }
       });
-      PrintStream os = new PrintStream(resp.getOutputStream());
+      final PrintWriter os = new PrintWriter(new OutputStreamWriter(
+          resp.getOutputStream(), Charsets.UTF_8));
       os.println(result);
       os.close();
     } catch(Exception e) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
index 912dee10370..e93f26093d7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
@@ -20,7 +20,8 @@ package org.apache.hadoop.hdfs.server.namenode.web.resources;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.OutputStream;
-import java.io.PrintStream;
+import java.io.OutputStreamWriter;
+import java.io.PrintWriter;
 import java.net.InetAddress;
 import java.net.URI;
 import java.net.URISyntaxException;
@@ -102,6 +103,7 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 
+import com.google.common.base.Charsets;
 import com.sun.jersey.spi.container.ResourceFilters;
 
 /** Web-hdfs NameNode implementation. */
@@ -713,7 +715,8 @@ public class NamenodeWebHdfsMethods {
     return new StreamingOutput() {
       @Override
       public void write(final OutputStream outstream) throws IOException {
-        final PrintStream out = new PrintStream(outstream);
+        final PrintWriter out = new PrintWriter(new OutputStreamWriter(
+            outstream, Charsets.UTF_8));
         out.println("{\"" + FileStatus.class.getSimpleName() + "es\":{\""
           + FileStatus.class.getSimpleName() + "\":[");
@@ -736,6 +739,7 @@ public class NamenodeWebHdfsMethods {
 
         out.println();
         out.println("]}}");
+        out.flush();
       }
     };
   }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java
index 4c6d888b1d3..cf6f9d503d0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java
@@ -55,6 +55,8 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.GenericOptionsParser;
 
+import com.google.common.base.Charsets;
+
 /**
  * Fetch a DelegationToken from the current Namenode and store it in the
  * specified file.
@@ -269,8 +271,8 @@ public class DelegationTokenFetcher {
         throw new IOException("Error renewing token: " +
             connection.getResponseMessage());
       }
-      in = new BufferedReader(new InputStreamReader
-          (connection.getInputStream()));
+      in = new BufferedReader(
+          new InputStreamReader(connection.getInputStream(), Charsets.UTF_8));
       long result = Long.parseLong(in.readLine());
       in.close();
       return result;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsXmlLoader.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsXmlLoader.java
index 0f53415e95c..95cc3b89120 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsXmlLoader.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/OfflineEditsXmlLoader.java
@@ -18,9 +18,10 @@
 package org.apache.hadoop.hdfs.tools.offlineEditsViewer;
 
 import java.io.File;
+import java.io.FileInputStream;
 import java.io.FileNotFoundException;
-import java.io.FileReader;
 import java.io.IOException;
+import java.io.InputStreamReader;
 import java.util.Stack;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -39,6 +40,8 @@ import org.xml.sax.XMLReader;
 import org.xml.sax.helpers.DefaultHandler;
 import org.xml.sax.helpers.XMLReaderFactory;
 
+import com.google.common.base.Charsets;
+
 /**
  * OfflineEditsXmlLoader walks an EditsVisitor over an OEV XML file
 */
@@ -48,7 +51,7 @@ class OfflineEditsXmlLoader
     extends DefaultHandler implements OfflineEditsLoader {
   private final boolean fixTxIds;
   private final OfflineEditsVisitor visitor;
-  private final FileReader fileReader;
+  private final InputStreamReader fileReader;
   private ParseState state;
   private Stanza stanza;
  private Stack<Stanza> stanzaStack;
@@ -70,7 +73,8 @@ class OfflineEditsXmlLoader
   public OfflineEditsXmlLoader(OfflineEditsVisitor visitor,
        File inputFile, OfflineEditsViewer.Flags flags) throws FileNotFoundException {
     this.visitor = visitor;
-    this.fileReader = new FileReader(inputFile);
+    this.fileReader =
+        new InputStreamReader(new FileInputStream(inputFile), Charsets.UTF_8);
     this.fixTxIds = flags.getFixTxIds();
   }
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/StatisticsEditsVisitor.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/StatisticsEditsVisitor.java
index 3fd1dc26a0c..c173e170f0f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/StatisticsEditsVisitor.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineEditsViewer/StatisticsEditsVisitor.java
@@ -19,7 +19,8 @@ package org.apache.hadoop.hdfs.tools.offlineEditsViewer;
 
 import java.io.IOException;
 import java.io.OutputStream;
-import java.io.PrintStream;
+import java.io.OutputStreamWriter;
+import java.io.PrintWriter;
 import java.util.Map;
 import java.util.HashMap;
@@ -29,6 +30,8 @@ import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp;
 import org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes;
 
+import com.google.common.base.Charsets;
+
 /**
  * StatisticsEditsVisitor implements text version of EditsVisitor
 * that aggregates counts of op codes processed
@@ -37,7 +40,7 @@ import org.apache.hadoop.hdfs.server.namenode.FSEditLogOpCodes;
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class StatisticsEditsVisitor implements OfflineEditsVisitor {
-  final private PrintStream out;
+  final private PrintWriter out;
 
   private int version = -1;
   private final Map<FSEditLogOpCodes, Long> opCodeCount =
@@ -52,7 +55,7 @@ public class StatisticsEditsVisitor implements OfflineEditsVisitor {
    * @param printToScreen Mirror output to screen?
    */
   public StatisticsEditsVisitor(OutputStream out) throws IOException {
-    this.out = new PrintStream(out);
+    this.out = new PrintWriter(new OutputStreamWriter(out, Charsets.UTF_8));
   }
 
   /** Start the visitor */
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TextWriterImageVisitor.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TextWriterImageVisitor.java
index 88e66df62de..652d0100485 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TextWriterImageVisitor.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/offlineImageViewer/TextWriterImageVisitor.java
@@ -17,8 +17,12 @@
  */
 package org.apache.hadoop.hdfs.tools.offlineImageViewer;
 
+import java.io.FileOutputStream;
 import java.io.FileWriter;
 import java.io.IOException;
+import java.io.OutputStreamWriter;
+
+import com.google.common.base.Charsets;
 
 /**
  * TextWriterImageProcessor mixes in the ability for ImageVisitor
@@ -34,7 +38,7 @@ import java.io.IOException;
 abstract class TextWriterImageVisitor extends ImageVisitor {
   private boolean printToScreen = false;
   private boolean okToWrite = false;
-  final private FileWriter fw;
+  final private OutputStreamWriter fw;
 
   /**
    * Create a processor that writes to the file named.
@@ -56,7 +60,7 @@ abstract class TextWriterImageVisitor extends ImageVisitor {
       throws IOException {
     super();
     this.printToScreen = printToScreen;
-    fw = new FileWriter(filename);
+    fw = new OutputStreamWriter(new FileOutputStream(filename), Charsets.UTF_8);
     okToWrite = true;
   }
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/MD5FileUtils.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/MD5FileUtils.java
index c010e2730a6..0d05be073f3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/MD5FileUtils.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/MD5FileUtils.java
@@ -20,9 +20,9 @@ package org.apache.hadoop.hdfs.util;
 import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileInputStream;
-import java.io.FileReader;
 import java.io.IOException;
 import java.io.InputStream;
+import java.io.InputStreamReader;
 import java.security.DigestInputStream;
 import java.security.MessageDigest;
 import java.util.regex.Matcher;
@@ -34,6 +34,8 @@ import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.MD5Hash;
 import org.apache.hadoop.util.StringUtils;
 
+import com.google.common.base.Charsets;
+
 /**
  * Static functions for dealing with files of the same format
 * that the Unix "md5sum" utility writes.
@@ -78,7 +80,8 @@ public abstract class MD5FileUtils {
     }
 
     BufferedReader reader =
-        new BufferedReader(new FileReader(md5File));
+        new BufferedReader(new InputStreamReader(new FileInputStream(
+            md5File), Charsets.UTF_8));
     try {
       md5Line = reader.readLine();
       if (md5Line == null) { md5Line = ""; }
@@ -138,7 +141,7 @@ public abstract class MD5FileUtils {
     String md5Line = digestString + " *" + dataFile.getName() + "\n";
 
     AtomicFileOutputStream afos = new AtomicFileOutputStream(md5File);
-    afos.write(md5Line.getBytes());
+    afos.write(md5Line.getBytes(Charsets.UTF_8));
     afos.close();
     LOG.debug("Saved MD5 " + digest + " to " + md5File);
   }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/PersistentLongFile.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/PersistentLongFile.java
index 292d0dfe63e..6ef047dbe9e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/PersistentLongFile.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/PersistentLongFile.java
@@ -19,14 +19,18 @@ package org.apache.hadoop.hdfs.util;
 
 import java.io.BufferedReader;
 import java.io.File;
+import java.io.FileInputStream;
 import java.io.FileReader;
 import java.io.IOException;
+import java.io.InputStreamReader;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.IOUtils;
 
+import com.google.common.base.Charsets;
+
 /**
  * Class that represents a file on disk which persistently stores
  * a single long value. The file is updated atomically
@@ -74,7 +78,7 @@ public class PersistentLongFile {
   public static void writeFile(File file, long val) throws IOException {
     AtomicFileOutputStream fos = new AtomicFileOutputStream(file);
     try {
-      fos.write(String.valueOf(val).getBytes());
+      fos.write(String.valueOf(val).getBytes(Charsets.UTF_8));
       fos.write('\n');
       fos.close();
       fos = null;
@@ -88,7 +92,9 @@ public class PersistentLongFile {
   public static long readFile(File file, long defaultVal) throws IOException {
     long val = defaultVal;
     if (file.exists()) {
-      BufferedReader br = new BufferedReader(new FileReader(file));
+      BufferedReader br =
+          new BufferedReader(new InputStreamReader(new FileInputStream(
+              file), Charsets.UTF_8));
       try {
         val = Long.valueOf(br.readLine());
         br.close();
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
index 7304b1df966..6c8c29cd0fa 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
@@ -105,6 +105,8 @@ import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSelect
 import org.apache.hadoop.util.Progressable;
 import org.mortbay.util.ajax.JSON;
 
+import com.google.common.base.Charsets;
+
 /** A FileSystem for HDFS over the web. */
 public class WebHdfsFileSystem extends FileSystem
     implements DelegationTokenRenewer.Renewable {
@@ -281,7 +283,7 @@ public class WebHdfsFileSystem extends FileSystem
             + "\" (parsed=\"" + parsed + "\")");
       }
     }
-    return (Map<?, ?>)JSON.parse(new InputStreamReader(in));
+    return (Map<?, ?>)JSON.parse(new InputStreamReader(in, Charsets.UTF_8));
   }
 
   private static Map<?, ?> validateResponse(final HttpOpParam.Op op,
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestPathComponents.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestPathComponents.java
index 9a712ef28b1..3daabb920c4 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestPathComponents.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestPathComponents.java
@@ -25,6 +25,8 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.junit.Test;
 
+import com.google.common.base.Charsets;
+
 
 /**
  *
@@ -45,7 +47,7 @@ public class TestPathComponents {
     String pathString = str;
     byte[][] oldPathComponents = INode.getPathComponents(pathString);
     byte[][] newPathComponents =
-      DFSUtil.bytes2byteArray(pathString.getBytes("UTF-8"),
+      DFSUtil.bytes2byteArray(pathString.getBytes(Charsets.UTF_8),
                               (byte) Path.SEPARATOR_CHAR);
     if (oldPathComponents[0] == null) {
       assertTrue(oldPathComponents[0] == newPathComponents[0]);