HDFS-10662. Optimize UTF8 string/byte conversions. Contributed by Daryn Sharp.

Author: Kihwal Lee
Date:   2016-08-04 09:07:34 -05:00
Parent: 70c2781152
Commit: 6ae39199da

6 changed files with 24 additions and 24 deletions

DFSUtilClient.java

@@ -91,11 +91,21 @@ public class DFSUtilClient {
   public static final byte[] EMPTY_BYTES = {};
   private static final Logger LOG = LoggerFactory.getLogger(
       DFSUtilClient.class);
+  // Using the charset canonical name for String/byte[] conversions is much
+  // more efficient due to use of cached encoders/decoders.
+  private static final String UTF8_CSN = StandardCharsets.UTF_8.name();
+
   /**
    * Converts a string to a byte array using UTF8 encoding.
    */
   public static byte[] string2Bytes(String str) {
-    return str.getBytes(StandardCharsets.UTF_8);
+    try {
+      return str.getBytes(UTF8_CSN);
+    } catch (UnsupportedEncodingException e) {
+      // should never happen!
+      throw new IllegalArgumentException("UTF8 decoding is not supported", e);
+    }
   }

   /**
@@ -281,13 +291,13 @@ public class DFSUtilClient {
    * @param length The number of bytes to decode
    * @return The decoded string
    */
-  private static String bytes2String(byte[] bytes, int offset, int length) {
+  static String bytes2String(byte[] bytes, int offset, int length) {
     try {
-      return new String(bytes, offset, length, "UTF8");
-    } catch(UnsupportedEncodingException e) {
-      assert false : "UTF8 encoding is not supported ";
+      return new String(bytes, offset, length, UTF8_CSN);
+    } catch (UnsupportedEncodingException e) {
+      // should never happen!
+      throw new IllegalArgumentException("UTF8 encoding is not supported", e);
     }
-    return null;
   }

   /**

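A note on the UTF8_CSN change above: on the JDK 7/8 releases Hadoop targeted at the time, String.getBytes(String charsetName) and the matching String constructor go through java.lang.StringCoding, which caches the encoder and decoder per thread, while the getBytes(Charset) and String(byte[], Charset) overloads construct a fresh encoder or decoder (plus its buffers) on every call. The following standalone sketch contrasts the two paths; the class name, sample path, and iteration count are illustrative and not part of the commit, and the timing loop is a rough demonstration rather than a rigorous benchmark.

import java.nio.charset.StandardCharsets;

public class Utf8ConversionDemo {
  // Canonical name "UTF-8", mirroring the UTF8_CSN constant in the patch.
  private static final String UTF8_CSN = StandardCharsets.UTF_8.name();

  public static void main(String[] args) throws Exception {
    String path = "/user/example/some/reasonably/long/hdfs/path";
    int iters = 1_000_000;
    long sink = 0; // consume the results so the JIT cannot drop the work

    long t0 = System.nanoTime();
    for (int i = 0; i < iters; i++) {
      // Charset overload: on JDK 7/8 this builds a new encoder per call.
      sink += path.getBytes(StandardCharsets.UTF_8).length;
    }
    long charsetNs = System.nanoTime() - t0;

    long t1 = System.nanoTime();
    for (int i = 0; i < iters; i++) {
      // Canonical-name overload: hits the cached per-thread encoder.
      sink += path.getBytes(UTF8_CSN).length;
    }
    long nameNs = System.nanoTime() - t1;

    System.out.printf("Charset overload:        %d ms%n", charsetNs / 1_000_000);
    System.out.printf("Canonical-name overload: %d ms (sink=%d)%n",
        nameNs / 1_000_000, sink);
  }
}

The try/catch in the patched methods exists only because the name-based overloads declare the checked UnsupportedEncodingException; since the name comes from StandardCharsets.UTF_8 itself, that branch is unreachable, which is why rethrowing as IllegalArgumentException (instead of the old assert-and-return-null) is safe and also removes a silent null return path.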
DFSUtil.java

@@ -38,7 +38,6 @@ import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_SERVER_HTTPS_TRUSTSTORE_P
 import java.io.IOException;
 import java.io.PrintStream;
-import java.io.UnsupportedEncodingException;
 import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.net.URI;
@@ -248,12 +247,7 @@ public class DFSUtil {
    * @return The decoded string
    */
   public static String bytes2String(byte[] bytes, int offset, int length) {
-    try {
-      return new String(bytes, offset, length, "UTF8");
-    } catch(UnsupportedEncodingException e) {
-      assert false : "UTF8 encoding is not supported ";
-    }
-    return null;
+    return DFSUtilClient.bytes2String(bytes, 0, bytes.length);
   }

   /**

FSDirMkdirOp.java

@@ -32,7 +32,6 @@ import org.apache.hadoop.hdfs.protocol.QuotaExceededException;
 import org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot;
 import java.io.IOException;
-import java.nio.charset.StandardCharsets;
 import java.util.AbstractMap;
 import java.util.List;
 import java.util.Map;
@@ -121,8 +120,7 @@ class FSDirMkdirOp {
   static Map.Entry<INodesInPath, String> createAncestorDirectories(
       FSDirectory fsd, INodesInPath iip, PermissionStatus permission)
       throws IOException {
-    final String last =
-        new String(iip.getLastLocalName(), StandardCharsets.UTF_8);
+    final String last = DFSUtil.bytes2String(iip.getLastLocalName());
     INodesInPath existing = iip.getExistingINodes();
     List<String> children = iip.getPath(existing.length(),
         iip.length() - existing.length());
@@ -190,7 +188,7 @@
       throws IOException {
     assert fsd.hasWriteLock();
     existing = unprotectedMkdir(fsd, fsd.allocateNewInodeId(), existing,
-        localName.getBytes(StandardCharsets.UTF_8), perm, null, now());
+        DFSUtil.string2Bytes(localName), perm, null, now());
     if (existing == null) {
       return null;
     }

FSDirStatAndListingOp.java

@@ -45,7 +45,6 @@ import org.apache.hadoop.hdfs.util.ReadOnlyList;
 import java.io.FileNotFoundException;
 import java.io.IOException;
-import java.nio.charset.StandardCharsets;
 import java.util.Arrays;

 import static org.apache.hadoop.util.Time.now;
@@ -55,8 +54,7 @@ class FSDirStatAndListingOp {
       byte[] startAfter, boolean needLocation) throws IOException {
     byte[][] pathComponents = FSDirectory
         .getPathComponentsForReservedPath(srcArg);
-    final String startAfterString =
-        new String(startAfter, StandardCharsets.UTF_8);
+    final String startAfterString = DFSUtil.bytes2String(startAfter);
     String src = null;

     if (fsd.isPermissionEnabled()) {

FSDirWriteFileOp.java

@@ -32,6 +32,7 @@ import org.apache.hadoop.fs.permission.AclEntry;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.PermissionStatus;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
@@ -59,7 +60,6 @@ import org.apache.hadoop.util.ChunkedArrayList;
 import java.io.FileNotFoundException;
 import java.io.IOException;
-import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -610,7 +610,7 @@
       }
       INodeFile newNode = newINodeFile(fsd.allocateNewInodeId(), permissions,
           modTime, modTime, replication, preferredBlockSize, ecPolicy != null);
-      newNode.setLocalName(localName.getBytes(StandardCharsets.UTF_8));
+      newNode.setLocalName(DFSUtil.string2Bytes(localName));
       newNode.toUnderConstruction(clientName, clientMachine);
       newiip = fsd.addINode(existing, newNode);
     } finally {

FSDirXAttrOp.java

@@ -18,7 +18,6 @@
 package org.apache.hadoop.hdfs.server.namenode;

 import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Charsets;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import org.apache.hadoop.HadoopIllegalArgumentException;
@@ -26,6 +25,7 @@ import org.apache.hadoop.fs.XAttr;
 import org.apache.hadoop.fs.XAttrSetFlag;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.XAttrHelper;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos;
@@ -410,7 +410,7 @@
    * the configured limit. Setting a limit of zero disables this check.
    */
   private static void checkXAttrSize(FSDirectory fsd, XAttr xAttr) {
-    int size = xAttr.getName().getBytes(Charsets.UTF_8).length;
+    int size = DFSUtil.string2Bytes(xAttr.getName()).length;
     if (xAttr.getValue() != null) {
       size += xAttr.getValue().length;
     }
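
For context on the checkXAttrSize hunk: the limit it enforces comes from the dfs.namenode.fs-limits.max-xattr-size setting, and per the javadoc above, a limit of zero disables the check. The size being compared is the UTF-8 byte length of the xattr name plus the raw length of its value. A minimal standalone sketch of that arithmetic follows; the class name, method signature, and exception type are illustrative assumptions rather than the HDFS source, and it encodes the name directly instead of going through DFSUtil.string2Bytes.

import java.nio.charset.StandardCharsets;

final class XAttrSizeCheckSketch {
  /** maxSize mirrors dfs.namenode.fs-limits.max-xattr-size. */
  static void checkXAttrSize(String name, byte[] value, int maxSize) {
    if (maxSize == 0) {
      return; // a limit of zero disables the check
    }
    int size = name.getBytes(StandardCharsets.UTF_8).length; // name bytes
    if (value != null) {
      size += value.length; // plus raw value bytes
    }
    if (size > maxSize) {
      throw new IllegalArgumentException(
          "XAttr is too big: " + size + " bytes > limit of " + maxSize);
    }
  }
}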