HDFS-8237. Move all protocol classes used by ClientProtocol to hdfs-client. Contributed by Haohui Mai.
parent bf70c5ae28
commit 0d6aa5d609
@@ -11,6 +11,9 @@
       <Class name="org.apache.hadoop.hdfs.protocol.DirectoryListing"/>
       <Class name="org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier"/>
       <Class name="org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey"/>
+      <Class name="org.apache.hadoop.hdfs.protocol.SnapshotDiffReport$DiffReportEntry"/>
+      <Class name="org.apache.hadoop.hdfs.protocol.SnapshottableDirectoryStatus"/>
+      <Class name="org.apache.hadoop.hdfs.server.protocol.DatanodeStorageReport"/>
     </Or>
     <Bug pattern="EI_EXPOSE_REP,EI_EXPOSE_REP2" />
   </Match>
@@ -19,6 +19,7 @@ package org.apache.hadoop.hdfs;
 import com.google.common.base.Joiner;
 import com.google.common.collect.Maps;
+import com.google.common.primitives.SignedBytes;
 import org.apache.commons.io.Charsets;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
@@ -36,15 +37,19 @@ import org.slf4j.LoggerFactory;
 import java.io.UnsupportedEncodingException;
 import java.net.InetSocketAddress;
+import java.text.SimpleDateFormat;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.Date;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 
 import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_HA_NAMENODES_KEY_PREFIX;
 import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_NAMESERVICES;
 
 public class DFSUtilClient {
+  public static final byte[] EMPTY_BYTES = {};
   private static final Logger LOG = LoggerFactory.getLogger(
       DFSUtilClient.class);
   /**
@@ -184,6 +189,48 @@ public class DFSUtilClient {
     return blkLocations;
   }
 
+  /** Compare two byte arrays by lexicographical order. */
+  public static int compareBytes(byte[] left, byte[] right) {
+    if (left == null) {
+      left = EMPTY_BYTES;
+    }
+    if (right == null) {
+      right = EMPTY_BYTES;
+    }
+    return SignedBytes.lexicographicalComparator().compare(left, right);
+  }
+
+  /**
+   * Given a list of path components returns a byte array
+   */
+  public static byte[] byteArray2bytes(byte[][] pathComponents) {
+    if (pathComponents.length == 0) {
+      return EMPTY_BYTES;
+    } else if (pathComponents.length == 1
+        && (pathComponents[0] == null || pathComponents[0].length == 0)) {
+      return new byte[]{(byte) Path.SEPARATOR_CHAR};
+    }
+    int length = 0;
+    for (int i = 0; i < pathComponents.length; i++) {
+      length += pathComponents[i].length;
+      if (i < pathComponents.length - 1) {
+        length++; // for SEPARATOR
+      }
+    }
+    byte[] path = new byte[length];
+    int index = 0;
+    for (int i = 0; i < pathComponents.length; i++) {
+      System.arraycopy(pathComponents[i], 0, path, index,
+          pathComponents[i].length);
+      index += pathComponents[i].length;
+      if (i < pathComponents.length - 1) {
+        path[index] = (byte) Path.SEPARATOR_CHAR;
+        index++;
+      }
+    }
+    return path;
+  }
+
   /**
    * Decode a specific range of bytes of the given byte array to a string
    * using UTF8.
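Note: a quick sketch of how the two helpers added above behave; the values and the UTF-8 conversion are illustrative, not part of the patch.

    import static java.nio.charset.StandardCharsets.UTF_8;

    byte[][] components = {"foo".getBytes(UTF_8), "bar".getBytes(UTF_8)};
    byte[] joined = DFSUtilClient.byteArray2bytes(components);
    // Components are joined with '/', so joined holds the bytes of "foo/bar";
    // an empty component list yields EMPTY_BYTES, and a single empty component
    // yields just the separator "/".
    assert "foo/bar".equals(new String(joined, UTF_8));

    // compareBytes() treats null as empty, so null orders before any non-empty array.
    assert DFSUtilClient.compareBytes(null, "a".getBytes(UTF_8)) < 0;
    assert DFSUtilClient.compareBytes("a".getBytes(UTF_8), "a".getBytes(UTF_8)) == 0;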
@@ -343,4 +390,42 @@ public class DFSUtilClient {
     }
     return true;
   }
+
+  /**
+   * Converts a time duration in milliseconds into DDD:HH:MM:SS format.
+   */
+  public static String durationToString(long durationMs) {
+    boolean negative = false;
+    if (durationMs < 0) {
+      negative = true;
+      durationMs = -durationMs;
+    }
+    // Chop off the milliseconds
+    long durationSec = durationMs / 1000;
+    final int secondsPerMinute = 60;
+    final int secondsPerHour = 60*60;
+    final int secondsPerDay = 60*60*24;
+    final long days = durationSec / secondsPerDay;
+    durationSec -= days * secondsPerDay;
+    final long hours = durationSec / secondsPerHour;
+    durationSec -= hours * secondsPerHour;
+    final long minutes = durationSec / secondsPerMinute;
+    durationSec -= minutes * secondsPerMinute;
+    final long seconds = durationSec;
+    final long milliseconds = durationMs % 1000;
+    String format = "%03d:%02d:%02d:%02d.%03d";
+    if (negative) {
+      format = "-" + format;
+    }
+    return String.format(format, days, hours, minutes, seconds, milliseconds);
+  }
+
+  /**
+   * Converts a Date into an ISO-8601 formatted datetime string.
+   */
+  public static String dateToIso8601String(Date date) {
+    SimpleDateFormat df =
+        new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssZ", Locale.ENGLISH);
+    return df.format(date);
+  }
 }
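Note: a small sketch of the output shapes these helpers produce; the times are illustrative.

    // 1 day, 1 hour, 1 minute, 1 second and 500 ms
    long ms = ((24 * 3600 + 3600 + 60 + 1) * 1000L) + 500;
    DFSUtilClient.durationToString(ms);    // "001:01:01:01.500"
    DFSUtilClient.durationToString(-ms);   // "-001:01:01:01.500"
    // ISO-8601 with an RFC 822 numeric zone, e.g. "2015-04-29T14:30:00-0700"
    DFSUtilClient.dateToIso8601String(new java.util.Date());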
@@ -38,6 +38,7 @@ public interface HdfsClientConfigKeys {
   int DFS_NAMENODE_HTTPS_PORT_DEFAULT = 50470;
   String DFS_NAMENODE_HTTPS_ADDRESS_KEY = "dfs.namenode.https-address";
   String DFS_HA_NAMENODES_KEY_PREFIX = "dfs.ha.namenodes";
+  int DFS_NAMENODE_RPC_PORT_DEFAULT = 8020;
 
   /** dfs.client.retry configuration properties */
   interface Retry {
@@ -24,9 +24,9 @@ import org.apache.commons.lang.builder.HashCodeBuilder;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hdfs.DFSUtil;
 
 import com.google.common.base.Preconditions;
+import org.apache.hadoop.hdfs.DFSUtilClient;
 
 /**
  * Describes a path-based cache directive.
@@ -244,9 +244,9 @@ public class CacheDirectiveInfo {
     @Override
     public String toString() {
       if (isRelative) {
-        return DFSUtil.durationToString(ms);
+        return DFSUtilClient.durationToString(ms);
       }
-      return DFSUtil.dateToIso8601String(new Date(ms));
+      return DFSUtilClient.dateToIso8601String(new Date(ms));
     }
   }
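Note: a hedged sketch of how the two branches above surface to callers; the Expiration.newRelative/newAbsolute factories are assumed from the existing CacheDirectiveInfo API and are not part of this patch.

    CacheDirectiveInfo.Expiration relative =
        CacheDirectiveInfo.Expiration.newRelative(30 * 60 * 1000L);  // 30 minutes
    CacheDirectiveInfo.Expiration absolute =
        CacheDirectiveInfo.Expiration.newAbsolute(new java.util.Date());
    relative.toString();  // duration form, e.g. "000:00:30:00.000"
    absolute.toString();  // ISO-8601 form, e.g. "2015-05-01T12:00:00-0700"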
@@ -22,9 +22,9 @@ import java.util.Collections;
 import java.util.List;
 
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hdfs.DFSUtil;
 
 import com.google.common.base.Objects;
+import org.apache.hadoop.hdfs.DFSUtilClient;
 
 /**
  * This class represents to end users the difference between two snapshots of
@@ -102,8 +102,8 @@ public class SnapshotDiffReport {
     public DiffReportEntry(DiffType type, byte[][] sourcePathComponents,
         byte[][] targetPathComponents) {
       this.type = type;
-      this.sourcePath = DFSUtil.byteArray2bytes(sourcePathComponents);
-      this.targetPath = targetPathComponents == null ? null : DFSUtil
+      this.sourcePath = DFSUtilClient.byteArray2bytes(sourcePathComponents);
+      this.targetPath = targetPathComponents == null ? null : DFSUtilClient
           .byteArray2bytes(targetPathComponents);
     }
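Note: a hedged sketch of constructing an entry with the signature shown above; the path components and the UTF_8 static import are illustrative.

    byte[][] src = {"dir".getBytes(UTF_8), "old".getBytes(UTF_8)};
    byte[][] dst = {"dir".getBytes(UTF_8), "new".getBytes(UTF_8)};
    DiffReportEntry entry = new DiffReportEntry(DiffType.RENAME, src, dst);
    // The components are flattened with DFSUtilClient.byteArray2bytes(), so the
    // stored source path is the byte sequence of "dir/old".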
@@ -121,7 +121,7 @@
     }
 
     static String getPathString(byte[] path) {
-      String pathStr = DFSUtil.bytes2String(path);
+      String pathStr = DFSUtilClient.bytes2String(path);
       if (pathStr.isEmpty()) {
         return Path.CUR_DIR;
       } else {
@@ -24,7 +24,7 @@ import java.util.Date;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdfs.DFSUtilClient;
 
 /**
  * Metadata about a snapshottable directory
@@ -36,9 +36,9 @@ public class SnapshottableDirectoryStatus {
     @Override
     public int compare(SnapshottableDirectoryStatus left,
                        SnapshottableDirectoryStatus right) {
-      int d = DFSUtil.compareBytes(left.parentFullPath, right.parentFullPath);
+      int d = DFSUtilClient.compareBytes(left.parentFullPath, right.parentFullPath);
       return d != 0? d
-          : DFSUtil.compareBytes(left.dirStatus.getLocalNameInBytes(),
+          : DFSUtilClient.compareBytes(left.dirStatus.getLocalNameInBytes(),
               right.dirStatus.getLocalNameInBytes());
     }
   };
@@ -101,7 +101,7 @@ public class SnapshottableDirectoryStatus {
   public Path getFullPath() {
     String parentFullPathStr =
         (parentFullPath == null || parentFullPath.length == 0) ?
-            null : DFSUtil.bytes2String(parentFullPath);
+            null : DFSUtilClient.bytes2String(parentFullPath);
     if (parentFullPathStr == null
         && dirStatus.getLocalNameInBytes().length == 0) {
       // root
@@ -22,7 +22,7 @@ import java.util.Collection;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.SecurityUtil;
@@ -57,7 +57,7 @@ public class DelegationTokenSelector
     Text serviceName = SecurityUtil.buildTokenService(nnUri);
     final String nnServiceName = conf.get(SERVICE_NAME_KEY + serviceName);
 
-    int nnRpcPort = NameNode.DEFAULT_PORT;
+    int nnRpcPort = HdfsClientConfigKeys.DFS_NAMENODE_RPC_PORT_DEFAULT;
     if (nnServiceName != null) {
       nnRpcPort = NetUtils.createSocketAddr(nnServiceName, nnRpcPort).getPort();
     }
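Note: a small sketch of the fallback this preserves, assuming NetUtils.createSocketAddr keeps the supplied default when the target string carries no explicit port; the host names are made up.

    int defaultPort = HdfsClientConfigKeys.DFS_NAMENODE_RPC_PORT_DEFAULT;      // 8020
    // An explicit port in the configured service name wins ...
    NetUtils.createSocketAddr("nn1.example.com:9000", defaultPort).getPort();  // 9000
    // ... otherwise the client-side default is used.
    NetUtils.createSocketAddr("nn1.example.com", defaultPort).getPort();       // 8020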
@@ -504,6 +504,9 @@ Release 2.8.0 - UNRELEASED
     "dfs.client.read.shortcircuit.streams.cache.size" (Brahma Reddy Battula via
     Colin P. McCabe)
 
+    HDFS-8237. Move all protocol classes used by ClientProtocol to hdfs-client.
+    (wheat9)
+
   OPTIMIZATIONS
 
     HDFS-8026. Trace FSOutputSummer#writeChecksumChunks rather than
@@ -43,14 +43,12 @@ import java.net.InetSocketAddress;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.security.SecureRandom;
-import java.text.SimpleDateFormat;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Comparator;
 import java.util.Date;
 import java.util.HashSet;
 import java.util.List;
-import java.util.Locale;
 import java.util.Map;
 import java.util.Random;
 import java.util.Set;
@@ -97,26 +95,12 @@ import com.google.common.base.Charsets;
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
-import com.google.common.primitives.SignedBytes;
 import com.google.protobuf.BlockingService;
 
 @InterfaceAudience.Private
 public class DFSUtil {
   public static final Log LOG = LogFactory.getLog(DFSUtil.class.getName());
 
-  public static final byte[] EMPTY_BYTES = {};
-
-  /** Compare two byte arrays by lexicographical order. */
-  public static int compareBytes(byte[] left, byte[] right) {
-    if (left == null) {
-      left = EMPTY_BYTES;
-    }
-    if (right == null) {
-      right = EMPTY_BYTES;
-    }
-    return SignedBytes.lexicographicalComparator().compare(left, right);
-  }
-
   private DFSUtil() { /* Hidden constructor */ }
   private static final ThreadLocal<Random> RANDOM = new ThreadLocal<Random>() {
     @Override
@@ -345,37 +329,6 @@ public class DFSUtil {
     return Joiner.on(Path.SEPARATOR).join(components);
   }
 
-  /**
-   * Given a list of path components returns a byte array
-   */
-  public static byte[] byteArray2bytes(byte[][] pathComponents) {
-    if (pathComponents.length == 0) {
-      return EMPTY_BYTES;
-    } else if (pathComponents.length == 1
-        && (pathComponents[0] == null || pathComponents[0].length == 0)) {
-      return new byte[]{(byte) Path.SEPARATOR_CHAR};
-    }
-    int length = 0;
-    for (int i = 0; i < pathComponents.length; i++) {
-      length += pathComponents[i].length;
-      if (i < pathComponents.length - 1) {
-        length++; // for SEPARATOR
-      }
-    }
-    byte[] path = new byte[length];
-    int index = 0;
-    for (int i = 0; i < pathComponents.length; i++) {
-      System.arraycopy(pathComponents[i], 0, path, index,
-          pathComponents[i].length);
-      index += pathComponents[i].length;
-      if (i < pathComponents.length - 1) {
-        path[index] = (byte) Path.SEPARATOR_CHAR;
-        index++;
-      }
-    }
-    return path;
-  }
-
   /** Convert an object representing a path to a string. */
   public static String path2String(final Object path) {
     return path == null? null
@@ -1377,38 +1330,14 @@ public class DFSUtil {
    * Converts a Date into an ISO-8601 formatted datetime string.
    */
   public static String dateToIso8601String(Date date) {
-    SimpleDateFormat df =
-        new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssZ", Locale.ENGLISH);
-    return df.format(date);
+    return DFSUtilClient.dateToIso8601String(date);
   }
 
   /**
    * Converts a time duration in milliseconds into DDD:HH:MM:SS format.
    */
   public static String durationToString(long durationMs) {
-    boolean negative = false;
-    if (durationMs < 0) {
-      negative = true;
-      durationMs = -durationMs;
-    }
-    // Chop off the milliseconds
-    long durationSec = durationMs / 1000;
-    final int secondsPerMinute = 60;
-    final int secondsPerHour = 60*60;
-    final int secondsPerDay = 60*60*24;
-    final long days = durationSec / secondsPerDay;
-    durationSec -= days * secondsPerDay;
-    final long hours = durationSec / secondsPerHour;
-    durationSec -= hours * secondsPerHour;
-    final long minutes = durationSec / secondsPerMinute;
-    durationSec -= minutes * secondsPerMinute;
-    final long seconds = durationSec;
-    final long milliseconds = durationMs % 1000;
-    String format = "%03d:%02d:%02d:%02d.%03d";
-    if (negative) {
-      format = "-" + format;
-    }
-    return String.format(format, days, hours, minutes, seconds, milliseconds);
+    return DFSUtilClient.durationToString(durationMs);
   }
 
   /**
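Note: a minimal sketch of what the delegation above means for existing callers; the example value is illustrative of the now-shared implementation.

    long ms = 61000L;
    // Both entry points resolve to the same code, so server-side callers of
    // DFSUtil keep working unchanged.
    DFSUtil.durationToString(ms).equals(DFSUtilClient.durationToString(ms));  // true, "000:00:01:01.000"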
@@ -47,9 +47,9 @@ import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState;
 import org.apache.hadoop.ha.proto.HAServiceProtocolProtos;
+import org.apache.hadoop.hdfs.DFSUtilClient;
 import org.apache.hadoop.hdfs.inotify.EventBatch;
 import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;
-import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.inotify.Event;
 import org.apache.hadoop.hdfs.inotify.EventBatchList;
 import org.apache.hadoop.hdfs.protocol.Block;
@@ -469,7 +469,7 @@ public class PBHelper {
   public static BlockKeyProto convert(BlockKey key) {
     byte[] encodedKey = key.getEncodedKey();
     ByteString keyBytes = ByteString.copyFrom(encodedKey == null ?
-        DFSUtil.EMPTY_BYTES : encodedKey);
+        DFSUtilClient.EMPTY_BYTES : encodedKey);
     return BlockKeyProto.newBuilder().setKeyId(key.getKeyId())
         .setKeyBytes(keyBytes).setExpiryDate(key.getExpiryDate()).build();
   }
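Note: a minimal sketch of the null-safe pattern used throughout these PBHelper conversions; protobuf ByteString cannot hold null, so a missing byte[] is mapped to the shared empty array (the input value here is hypothetical).

    byte[] encodedKey = null;  // e.g. a BlockKey with no encoded material
    ByteString keyBytes = ByteString.copyFrom(
        encodedKey == null ? DFSUtilClient.EMPTY_BYTES : encodedKey);
    keyBytes.isEmpty();  // true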
@@ -1514,7 +1514,7 @@ public class PBHelper {
     int snapshotQuota = status.getSnapshotQuota();
     byte[] parentFullPath = status.getParentFullPath();
     ByteString parentFullPathBytes = ByteString.copyFrom(
-        parentFullPath == null ? DFSUtil.EMPTY_BYTES : parentFullPath);
+        parentFullPath == null ? DFSUtilClient.EMPTY_BYTES : parentFullPath);
     HdfsFileStatusProto fs = convert(status.getDirStatus());
     SnapshottableDirectoryStatusProto.Builder builder =
         SnapshottableDirectoryStatusProto
@@ -2024,7 +2024,7 @@ public class PBHelper {
       return null;
     }
     ByteString sourcePath = ByteString
-        .copyFrom(entry.getSourcePath() == null ? DFSUtil.EMPTY_BYTES : entry
+        .copyFrom(entry.getSourcePath() == null ? DFSUtilClient.EMPTY_BYTES : entry
             .getSourcePath());
     String modification = entry.getType().getLabel();
     SnapshotDiffReportEntryProto.Builder builder = SnapshotDiffReportEntryProto
@@ -2032,7 +2032,7 @@ public class PBHelper {
         .setModificationLabel(modification);
     if (entry.getType() == DiffType.RENAME) {
       ByteString targetPath = ByteString
-          .copyFrom(entry.getTargetPath() == null ? DFSUtil.EMPTY_BYTES : entry
+          .copyFrom(entry.getTargetPath() == null ? DFSUtilClient.EMPTY_BYTES : entry
              .getTargetPath());
       builder.setTargetPath(targetPath);
     }
@@ -29,6 +29,7 @@ import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.fs.permission.PermissionStatus;
+import org.apache.hadoop.hdfs.DFSUtilClient;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.server.blockmanagement.BlockStoragePolicySuite;
 import org.apache.hadoop.hdfs.DFSUtil;
@@ -765,7 +766,7 @@ public abstract class INode implements INodeAttributes, Diff.Element<byte[]> {
 
   @Override
   public final int compareTo(byte[] bytes) {
-    return DFSUtil.compareBytes(getLocalNameBytes(), bytes);
+    return DFSUtilClient.compareBytes(getLocalNameBytes(), bytes);
   }
 
   @Override
@@ -30,6 +30,7 @@ import java.util.concurrent.atomic.AtomicInteger;
 import javax.management.ObjectName;
 
 import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdfs.DFSUtilClient;
 import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport;
 import org.apache.hadoop.hdfs.protocol.SnapshotException;
 import org.apache.hadoop.hdfs.protocol.SnapshotInfo;
@@ -342,7 +343,7 @@ public class SnapshotManager implements SnapshotStatsMXBean {
           dir.getChildrenNum(Snapshot.CURRENT_STATE_ID),
           dir.getDirectorySnapshottableFeature().getNumSnapshots(),
           dir.getDirectorySnapshottableFeature().getSnapshotQuota(),
-          dir.getParent() == null ? DFSUtil.EMPTY_BYTES :
+          dir.getParent() == null ? DFSUtilClient.EMPTY_BYTES :
               DFSUtil.string2Bytes(dir.getParent().getFullPathName()));
       statusList.add(status);
     }