HDFS-8089. Move o.a.h.hdfs.web.resources.* to the client jars. Contributed by Haohui Mai.

Haohui Mai 2015-04-08 16:30:08 -07:00
parent 5a540c3d31
commit cc25823546
47 changed files with 79 additions and 54 deletions
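The practical effect for downstream users is that the WebHDFS parameter classes and the handful of configuration constants they depend on now resolve from the hadoop-hdfs-client artifact rather than from the server-side hadoop-hdfs jar. A minimal usage sketch, assuming only the relocated classes visible in this diff plus their existing Long/Short constructors (the constructors themselves are not part of this change):

import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
import org.apache.hadoop.hdfs.web.resources.BlockSizeParam;
import org.apache.hadoop.hdfs.web.resources.ReplicationParam;

public class ClientJarUsageSketch {
  public static void main(String[] args) {
    // Constants that used to live in DFSConfigKeys are now read from the
    // client-side HdfsClientConfigKeys interface.
    BlockSizeParam blockSize =
        new BlockSizeParam(HdfsClientConfigKeys.DFS_BLOCK_SIZE_DEFAULT);
    ReplicationParam replication =
        new ReplicationParam(HdfsClientConfigKeys.DFS_REPLICATION_DEFAULT);

    // Param#toString() renders "name=value", e.g. "blocksize=134217728".
    System.out.println(blockSize + "&" + replication);
  }
}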

View File

@@ -29,6 +29,15 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<description>Apache Hadoop HDFS Client</description>
<name>Apache Hadoop HDFS Client</name>
<packaging>jar</packaging>
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<scope>provided</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>

View File

@@ -19,8 +19,15 @@ package org.apache.hadoop.hdfs.client;
/** Client configuration properties */
public interface HdfsClientConfigKeys {
static final String PREFIX = "dfs.client.";
String DFS_BLOCK_SIZE_KEY = "dfs.blocksize";
long DFS_BLOCK_SIZE_DEFAULT = 128*1024*1024;
String DFS_REPLICATION_KEY = "dfs.replication";
short DFS_REPLICATION_DEFAULT = 3;
String DFS_WEBHDFS_USER_PATTERN_DEFAULT = "^[A-Za-z_][A-Za-z0-9._-]*[$]?$";
String DFS_WEBHDFS_ACL_PERMISSION_PATTERN_DEFAULT =
"^(default:)?(user|group|mask|other):[[A-Za-z_][A-Za-z0-9._-]]*:([rwx-]{3})?(,(default:)?(user|group|mask|other):[[A-Za-z_][A-Za-z0-9._-]]*:([rwx-]{3})?)*$";
static final String PREFIX = "dfs.client.";
/** Client retry configuration properties */
public interface Retry {
static final String PREFIX = HdfsClientConfigKeys.PREFIX + "retry.";
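The two WebHDFS pattern defaults pulled into HdfsClientConfigKeys above are the regexes used to validate user names and ACL specifications passed to WebHDFS. A quick sketch of what they accept, using only the constants shown in this hunk (the principal "alice" is made up purely for illustration):

import java.util.regex.Pattern;
import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;

public class WebHdfsPatternSketch {
  public static void main(String[] args) {
    Pattern userPattern = Pattern.compile(
        HdfsClientConfigKeys.DFS_WEBHDFS_USER_PATTERN_DEFAULT);
    Pattern aclPattern = Pattern.compile(
        HdfsClientConfigKeys.DFS_WEBHDFS_ACL_PERMISSION_PATTERN_DEFAULT);

    System.out.println(userPattern.matcher("alice").matches());   // true
    System.out.println(userPattern.matcher("9alice").matches());  // false: may not start with a digit
    System.out.println(
        aclPattern.matcher("user:alice:rwx,group::r-x").matches()); // true
  }
}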

View File

@@ -17,7 +17,8 @@
*/
package org.apache.hadoop.hdfs.web.resources;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_WEBHDFS_ACL_PERMISSION_PATTERN_DEFAULT;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys
.DFS_WEBHDFS_ACL_PERMISSION_PATTERN_DEFAULT;
import java.util.List;
import java.util.regex.Pattern;
@@ -37,7 +38,7 @@ public class AclPermissionParam extends StringParam {
/**
* Constructor.
*
*
* @param str a string representation of the parameter value.
*/
public AclPermissionParam(final String str) {

View File

@@ -17,8 +17,8 @@
*/
package org.apache.hadoop.hdfs.web.resources;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BLOCK_SIZE_DEFAULT;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BLOCK_SIZE_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_BLOCK_SIZE_DEFAULT;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_BLOCK_SIZE_KEY;
import org.apache.hadoop.conf.Configuration;

View File

@@ -59,7 +59,7 @@ abstract class EnumSetParam<E extends Enum<E>> extends Param<EnumSet<E>, EnumSet
public String getValueString() {
return toString(value);
}
/** The domain of the parameter. */
static final class Domain<E extends Enum<E>> extends Param.Domain<EnumSet<E>> {
private final Class<E> enumClass;

View File

@@ -50,7 +50,7 @@ public class GetOpParam extends HttpOpParam<GetOpParam.Op> {
Op(final boolean redirect, final int expectedHttpResponseCode) {
this(redirect, expectedHttpResponseCode, false);
}
Op(final boolean redirect, final int expectedHttpResponseCode,
final boolean requireAuth) {
this.redirect = redirect;
@@ -62,7 +62,7 @@ public class GetOpParam extends HttpOpParam<GetOpParam.Op> {
public HttpOpParam.Type getType() {
return HttpOpParam.Type.GET;
}
@Override
public boolean getRequireAuth() {
return requireAuth;

View File

@@ -45,7 +45,7 @@ public abstract class HttpOpParam<E extends Enum<E> & HttpOpParam.Op>
/** @return true if the operation cannot use a token */
public boolean getRequireAuth();
/** @return true if the operation will do output. */
public boolean getDoOutput();
@@ -69,7 +69,7 @@ public abstract class HttpOpParam<E extends Enum<E> & HttpOpParam.Op>
GetOpParam.Op.OPEN);
static final TemporaryRedirectOp GETFILECHECKSUM = new TemporaryRedirectOp(
GetOpParam.Op.GETFILECHECKSUM);
static final List<TemporaryRedirectOp> values
= Collections.unmodifiableList(Arrays.asList(CREATE, APPEND, OPEN,
GETFILECHECKSUM));

View File

@@ -38,7 +38,7 @@ abstract class IntegerParam extends Param<Integer, IntegerParam.Domain> {
+ " = " + domain.toString(value) + " > " + domain.toString(max));
}
}
@Override
public String toString() {
return getName() + "=" + domain.toString(getValue());
@@ -80,7 +80,7 @@ abstract class IntegerParam extends Param<Integer, IntegerParam.Domain> {
}
}
/** Convert an Integer to a String. */
/** Convert an Integer to a String. */
String toString(final Integer n) {
return n == null? NULL: Integer.toString(n, radix);
}

View File

@@ -37,7 +37,7 @@ abstract class LongParam extends Param<Long, LongParam.Domain> {
+ " = " + domain.toString(value) + " > " + domain.toString(max));
}
}
@Override
public String toString() {
return getName() + "=" + domain.toString(getValue());
@@ -79,7 +79,7 @@ abstract class LongParam extends Param<Long, LongParam.Domain> {
}
}
/** Convert a Long to a String. */
/** Convert a Long to a String. */
String toString(final Long n) {
return n == null? NULL: Long.toString(n, radix);
}

View File

@@ -28,7 +28,7 @@ public class OldSnapshotNameParam extends StringParam {
public static final String DEFAULT = "";
private static final Domain DOMAIN = new Domain(NAME, null);
public OldSnapshotNameParam(final String str) {
super(DOMAIN, str != null && !str.equals(DEFAULT) ? str : null);
}

View File

@@ -26,7 +26,7 @@ import java.util.Comparator;
/** Base class of parameters. */
public abstract class Param<T, D extends Param.Domain<T>> {
static final String NULL = "null";
static final Comparator<Param<?,?>> NAME_CMP = new Comparator<Param<?,?>>() {
@Override
public int compare(Param<?, ?> left, Param<?, ?> right) {
@@ -90,11 +90,11 @@ public abstract class Param<T, D extends Param.Domain<T>> {
static abstract class Domain<T> {
/** Parameter name. */
final String paramName;
Domain(final String paramName) {
this.paramName = paramName;
}
/** @return the parameter name. */
public final String getParamName() {
return paramName;

View File

@@ -34,7 +34,7 @@ public class PermissionParam extends ShortParam {
public static FsPermission getDefaultFsPermission() {
return new FsPermission(DEFAULT_PERMISSION);
}
/**
* Constructor.
* @param value the parameter value.

View File

@@ -43,7 +43,7 @@ public class PostOpParam extends HttpOpParam<PostOpParam.Op> {
public Type getType() {
return Type.POST;
}
@Override
public boolean getRequireAuth() {
return false;

View File

@@ -33,22 +33,22 @@ public class PutOpParam extends HttpOpParam<PutOpParam.Op> {
SETOWNER(false, HttpURLConnection.HTTP_OK),
SETPERMISSION(false, HttpURLConnection.HTTP_OK),
SETTIMES(false, HttpURLConnection.HTTP_OK),
RENEWDELEGATIONTOKEN(false, HttpURLConnection.HTTP_OK, true),
CANCELDELEGATIONTOKEN(false, HttpURLConnection.HTTP_OK, true),
MODIFYACLENTRIES(false, HttpURLConnection.HTTP_OK),
REMOVEACLENTRIES(false, HttpURLConnection.HTTP_OK),
REMOVEDEFAULTACL(false, HttpURLConnection.HTTP_OK),
REMOVEACL(false, HttpURLConnection.HTTP_OK),
SETACL(false, HttpURLConnection.HTTP_OK),
SETXATTR(false, HttpURLConnection.HTTP_OK),
SETXATTR(false, HttpURLConnection.HTTP_OK),
REMOVEXATTR(false, HttpURLConnection.HTTP_OK),
CREATESNAPSHOT(false, HttpURLConnection.HTTP_OK),
RENAMESNAPSHOT(false, HttpURLConnection.HTTP_OK),
NULL(false, HttpURLConnection.HTTP_NOT_IMPLEMENTED);
final boolean doOutputAndRedirect;
@@ -58,7 +58,7 @@ public class PutOpParam extends HttpOpParam<PutOpParam.Op> {
Op(final boolean doOutputAndRedirect, final int expectedHttpResponseCode) {
this(doOutputAndRedirect, expectedHttpResponseCode, false);
}
Op(final boolean doOutputAndRedirect, final int expectedHttpResponseCode,
final boolean requireAuth) {
this.doOutputAndRedirect = doOutputAndRedirect;
@@ -70,7 +70,7 @@ public class PutOpParam extends HttpOpParam<PutOpParam.Op> {
public HttpOpParam.Type getType() {
return HttpOpParam.Type.PUT;
}
@Override
public boolean getRequireAuth() {
return requireAuth;

View File

@@ -17,8 +17,8 @@
*/
package org.apache.hadoop.hdfs.web.resources;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_REPLICATION_DEFAULT;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_REPLICATION_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_REPLICATION_DEFAULT;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_REPLICATION_KEY;
import org.apache.hadoop.conf.Configuration;

View File

@@ -38,7 +38,7 @@ abstract class ShortParam extends Param<Short, ShortParam.Domain> {
+ " = " + domain.toString(value) + " > " + domain.toString(max));
}
}
@Override
public String toString() {
return getName() + "=" + domain.toString(getValue());
@@ -80,7 +80,7 @@ abstract class ShortParam extends Param<Short, ShortParam.Domain> {
}
}
/** Convert a Short to a String. */
/** Convert a Short to a String. */
String toString(final Short n) {
return n == null? NULL: Integer.toString(n, radix);
}

View File

@@ -29,7 +29,7 @@ public class SnapshotNameParam extends StringParam {
public static final String DEFAULT = "";
private static final Domain DOMAIN = new Domain(NAME, null);
public SnapshotNameParam(final String str) {
super(DOMAIN, str != null && !str.equals(DEFAULT) ? str : null);
}

View File

@@ -17,10 +17,10 @@
*/
package org.apache.hadoop.hdfs.web.resources;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_WEBHDFS_USER_PATTERN_DEFAULT;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_WEBHDFS_USER_PATTERN_DEFAULT;
import org.apache.hadoop.security.UserGroupInformation;
import com.google.common.annotations.VisibleForTesting;
import java.text.MessageFormat;
import java.util.regex.Pattern;

View File

@@ -24,14 +24,14 @@ public class XAttrEncodingParam extends EnumParam<XAttrCodec> {
public static final String NAME = "encoding";
/** Default parameter value. */
public static final String DEFAULT = "";
private static final Domain<XAttrCodec> DOMAIN =
private static final Domain<XAttrCodec> DOMAIN =
new Domain<XAttrCodec>(NAME, XAttrCodec.class);
public XAttrEncodingParam(final XAttrCodec encoding) {
super(DOMAIN, encoding);
}
/**
* Constructor.
* @param str a string representation of the parameter value.
@@ -44,12 +44,12 @@ public class XAttrEncodingParam extends EnumParam<XAttrCodec> {
public String getName() {
return NAME;
}
@Override
public String getValueString() {
return value.toString();
}
public XAttrCodec getEncoding() {
return getValue();
}

View File

@@ -24,10 +24,10 @@ public class XAttrNameParam extends StringParam {
public static final String NAME = "xattr.name";
/** Default parameter value. **/
public static final String DEFAULT = "";
private static Domain DOMAIN = new Domain(NAME,
Pattern.compile(".*"));
public XAttrNameParam(final String str) {
super(DOMAIN, str == null || str.equals(DEFAULT) ? null : str);
}
@@ -36,7 +36,7 @@ public class XAttrNameParam extends StringParam {
public String getName() {
return NAME;
}
public String getXAttrName() {
final String v = getValue();
return v;

View File

@@ -33,7 +33,7 @@ public class XAttrSetFlagParam extends EnumSetParam<XAttrSetFlag> {
public XAttrSetFlagParam(final EnumSet<XAttrSetFlag> flag) {
super(DOMAIN, flag);
}
/**
* Constructor.
* @param str a string representation of the parameter value.
@@ -46,7 +46,7 @@ public class XAttrSetFlagParam extends EnumSetParam<XAttrSetFlag> {
public String getName() {
return NAME;
}
public EnumSet<XAttrSetFlag> getFlag() {
return getValue();
}

View File

@@ -26,9 +26,9 @@ public class XAttrValueParam extends StringParam {
public static final String NAME = "xattr.value";
/** Default parameter value. **/
public static final String DEFAULT = "";
private static Domain DOMAIN = new Domain(NAME, null);
public XAttrValueParam(final String str) {
super(DOMAIN, str == null || str.equals(DEFAULT) ? null : str);
}
@@ -37,7 +37,7 @@ public class XAttrValueParam extends StringParam {
public String getName() {
return NAME;
}
public byte[] getXAttrValue() throws IOException {
final String v = getValue();
return XAttrCodec.decodeValue(v);

View File

@@ -402,6 +402,8 @@ Release 2.8.0 - UNRELEASED
HDFS-8025. Addendum fix for HDFS-3087 Decomissioning on NN restart can
complete without blocks being replicated. (Ming Ma via wang)
HDFS-8089. Move o.a.h.hdfs.web.resources.* to the client jars. (wheat9)
OPTIMIZATIONS
HDFS-8026. Trace FSOutputSummer#writeChecksumChunks rather than

View File

@@ -34,10 +34,15 @@ import org.apache.hadoop.http.HttpConfig;
*/
@InterfaceAudience.Private
public class DFSConfigKeys extends CommonConfigurationKeys {
public static final String DFS_BLOCK_SIZE_KEY = "dfs.blocksize";
public static final long DFS_BLOCK_SIZE_DEFAULT = 128*1024*1024;
public static final String DFS_REPLICATION_KEY = "dfs.replication";
public static final short DFS_REPLICATION_DEFAULT = 3;
public static final String DFS_BLOCK_SIZE_KEY =
HdfsClientConfigKeys.DFS_BLOCK_SIZE_KEY;
public static final long DFS_BLOCK_SIZE_DEFAULT =
HdfsClientConfigKeys.DFS_BLOCK_SIZE_DEFAULT;
public static final String DFS_REPLICATION_KEY =
HdfsClientConfigKeys.DFS_REPLICATION_KEY;
public static final short DFS_REPLICATION_DEFAULT =
HdfsClientConfigKeys.DFS_REPLICATION_DEFAULT;
public static final String DFS_STREAM_BUFFER_SIZE_KEY = "dfs.stream-buffer-size";
public static final int DFS_STREAM_BUFFER_SIZE_DEFAULT = 4096;
public static final String DFS_BYTES_PER_CHECKSUM_KEY = "dfs.bytes-per-checksum";
@@ -49,7 +54,7 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
public static final String DFS_HDFS_BLOCKS_METADATA_ENABLED = "dfs.datanode.hdfs-blocks-metadata.enabled";
public static final boolean DFS_HDFS_BLOCKS_METADATA_ENABLED_DEFAULT = false;
public static final String DFS_WEBHDFS_ACL_PERMISSION_PATTERN_DEFAULT =
"^(default:)?(user|group|mask|other):[[A-Za-z_][A-Za-z0-9._-]]*:([rwx-]{3})?(,(default:)?(user|group|mask|other):[[A-Za-z_][A-Za-z0-9._-]]*:([rwx-]{3})?)*$";
HdfsClientConfigKeys.DFS_WEBHDFS_ACL_PERMISSION_PATTERN_DEFAULT;
// HA related configuration
public static final String DFS_DATANODE_RESTART_REPLICA_EXPIRY_KEY = "dfs.datanode.restart.replica.expiration";
@@ -154,7 +159,8 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
public static final String DFS_WEBHDFS_AUTHENTICATION_FILTER_KEY = "dfs.web.authentication.filter";
public static final String DFS_WEBHDFS_AUTHENTICATION_FILTER_DEFAULT = AuthFilter.class.getName();
public static final String DFS_WEBHDFS_USER_PATTERN_KEY = "dfs.webhdfs.user.provider.user.pattern";
public static final String DFS_WEBHDFS_USER_PATTERN_DEFAULT = "^[A-Za-z_][A-Za-z0-9._-]*[$]?$";
public static final String DFS_WEBHDFS_USER_PATTERN_DEFAULT =
HdfsClientConfigKeys.DFS_WEBHDFS_USER_PATTERN_DEFAULT;
public static final String DFS_PERMISSIONS_ENABLED_KEY = "dfs.permissions.enabled";
public static final boolean DFS_PERMISSIONS_ENABLED_DEFAULT = true;
public static final String DFS_PERMISSIONS_SUPERUSERGROUP_KEY = "dfs.permissions.superusergroup";
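Since the DFSConfigKeys entries above are now plain aliases for the client-side definitions, existing server-side code that references the old names continues to compile and resolves to the same values. A minimal sanity check, using only constants visible in this diff:

import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;

public class AliasSanityCheck {
  public static void main(String[] args) {
    // Both names point at the same key string, "dfs.blocksize".
    System.out.println(DFSConfigKeys.DFS_BLOCK_SIZE_KEY
        .equals(HdfsClientConfigKeys.DFS_BLOCK_SIZE_KEY));        // true

    // The defaults are identical as well: 128 MB block size, replication 3.
    System.out.println(DFSConfigKeys.DFS_BLOCK_SIZE_DEFAULT
        == HdfsClientConfigKeys.DFS_BLOCK_SIZE_DEFAULT);          // true
    System.out.println(DFSConfigKeys.DFS_REPLICATION_DEFAULT
        == HdfsClientConfigKeys.DFS_REPLICATION_DEFAULT);         // true
  }
}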