svn merge -c 1187140 from trunk for HDFS-2427.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1189496 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Tsz-wo Sze 2011-10-26 22:00:28 +00:00
parent 4f9ea3cc2e
commit 4e431dac38
16 changed files with 334 additions and 29 deletions

View File

@ -731,6 +731,9 @@ Release 0.23.0 - Unreleased
HDFS-2395. Add a root element in the JSON responses of webhdfs.
(szetszwo)
HDFS-2427. Change the default permission in webhdfs to 755 and add range
check/validation for all parameters. (szetszwo)
OPTIMIZATIONS
HDFS-1458. Improve checkpoint performance by avoiding unnecessary image

View File

@ -66,7 +66,7 @@ import org.apache.hadoop.hdfs.web.resources.BlockSizeParam;
import org.apache.hadoop.hdfs.web.resources.BufferSizeParam;
import org.apache.hadoop.hdfs.web.resources.DelegationParam;
import org.apache.hadoop.hdfs.web.resources.DeleteOpParam;
import org.apache.hadoop.hdfs.web.resources.DstPathParam;
import org.apache.hadoop.hdfs.web.resources.DestinationParam;
import org.apache.hadoop.hdfs.web.resources.GetOpParam;
import org.apache.hadoop.hdfs.web.resources.GroupParam;
import org.apache.hadoop.hdfs.web.resources.HttpOpParam;
@ -201,8 +201,8 @@ public class NamenodeWebHdfsMethods {
@PathParam(UriFsPathParam.NAME) final UriFsPathParam path,
@QueryParam(PutOpParam.NAME) @DefaultValue(PutOpParam.DEFAULT)
final PutOpParam op,
@QueryParam(DstPathParam.NAME) @DefaultValue(DstPathParam.DEFAULT)
final DstPathParam dstPath,
@QueryParam(DestinationParam.NAME) @DefaultValue(DestinationParam.DEFAULT)
final DestinationParam destination,
@QueryParam(OwnerParam.NAME) @DefaultValue(OwnerParam.DEFAULT)
final OwnerParam owner,
@QueryParam(GroupParam.NAME) @DefaultValue(GroupParam.DEFAULT)
@ -227,7 +227,7 @@ public class NamenodeWebHdfsMethods {
if (LOG.isTraceEnabled()) {
LOG.trace(op + ": " + path + ", ugi=" + ugi
+ Param.toSortedString(", ", dstPath, owner, group, permission,
+ Param.toSortedString(", ", destination, owner, group, permission,
overwrite, bufferSize, replication, blockSize,
modificationTime, accessTime, renameOptions));
}
@ -264,7 +264,7 @@ public class NamenodeWebHdfsMethods {
{
final EnumSet<Options.Rename> s = renameOptions.getValue();
if (s.isEmpty()) {
final boolean b = np.rename(fullpath, dstPath.getValue());
final boolean b = np.rename(fullpath, destination.getValue());
final String js = JsonUtil.toJsonString("boolean", b);
return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
} else {

View File

@ -59,7 +59,7 @@ import org.apache.hadoop.hdfs.web.resources.AccessTimeParam;
import org.apache.hadoop.hdfs.web.resources.BlockSizeParam;
import org.apache.hadoop.hdfs.web.resources.BufferSizeParam;
import org.apache.hadoop.hdfs.web.resources.DeleteOpParam;
import org.apache.hadoop.hdfs.web.resources.DstPathParam;
import org.apache.hadoop.hdfs.web.resources.DestinationParam;
import org.apache.hadoop.hdfs.web.resources.GetOpParam;
import org.apache.hadoop.hdfs.web.resources.GroupParam;
import org.apache.hadoop.hdfs.web.resources.HttpOpParam;
@ -292,7 +292,7 @@ public class WebHdfsFileSystem extends HftpFileSystem {
statistics.incrementWriteOps(1);
final HttpOpParam.Op op = PutOpParam.Op.RENAME;
final Map<String, Object> json = run(op, src,
new DstPathParam(makeQualified(dst).toUri().getPath()));
new DestinationParam(makeQualified(dst).toUri().getPath()));
return (Boolean)json.get("boolean");
}
@ -302,7 +302,7 @@ public class WebHdfsFileSystem extends HftpFileSystem {
final Options.Rename... options) throws IOException {
statistics.incrementWriteOps(1);
final HttpOpParam.Op op = PutOpParam.Op.RENAME;
run(op, src, new DstPathParam(makeQualified(dst).toUri().getPath()),
run(op, src, new DestinationParam(makeQualified(dst).toUri().getPath()),
new RenameOptionSetParam(options));
}

View File

@ -31,7 +31,7 @@ public class AccessTimeParam extends LongParam {
* @param value the parameter value.
*/
public AccessTimeParam(final Long value) {
super(DOMAIN, value);
super(DOMAIN, value, -1L, null);
}
/**

View File

@ -36,7 +36,7 @@ public class BlockSizeParam extends LongParam {
* @param value the parameter value.
*/
public BlockSizeParam(final Long value) {
super(DOMAIN, value);
super(DOMAIN, value, 1L, null);
}
/**

View File

@ -34,7 +34,7 @@ public class BufferSizeParam extends IntegerParam {
* @param value the parameter value.
*/
public BufferSizeParam(final Integer value) {
super(DOMAIN, value);
super(DOMAIN, value, 1, null);
}
/**

View File

@ -20,20 +20,31 @@ package org.apache.hadoop.hdfs.web.resources;
import org.apache.hadoop.fs.Path;
/** Destination path parameter. */
public class DstPathParam extends StringParam {
public class DestinationParam extends StringParam {
/** Parameter name. */
public static final String NAME = "dstpath";
public static final String NAME = "destination";
/** Default parameter value. */
public static final String DEFAULT = "";
private static final Domain DOMAIN = new Domain(NAME, null);
private static String validate(final String str) {
if (str == null || str.equals(DEFAULT)) {
return null;
}
if (!str.startsWith(Path.SEPARATOR)) {
throw new IllegalArgumentException("Invalid parameter value: " + NAME
+ " = \"" + str + "\" is not an absolute path.");
}
return new Path(str).toUri().getPath();
}
/**
* Constructor.
* @param str a string representation of the parameter value.
*/
public DstPathParam(final String str) {
super(DOMAIN, str == null || str.equals(DEFAULT)? null: new Path(str).toUri().getPath());
public DestinationParam(final String str) {
super(DOMAIN, validate(str));
}
@Override

View File

@ -19,8 +19,24 @@ package org.apache.hadoop.hdfs.web.resources;
/** Integer parameter. */
abstract class IntegerParam extends Param<Integer, IntegerParam.Domain> {
IntegerParam(final Domain domain, final Integer value) {
IntegerParam(final Domain domain, final Integer value,
final Integer min, final Integer max) {
super(domain, value);
checkRange(min, max);
}
private void checkRange(final Integer min, final Integer max) {
if (value == null) {
return;
}
if (min != null && value < min) {
throw new IllegalArgumentException("Invalid parameter range: " + getName()
+ " = " + domain.toString(value) + " < " + domain.toString(min));
}
if (max != null && value > max) {
throw new IllegalArgumentException("Invalid parameter range: " + getName()
+ " = " + domain.toString(value) + " > " + domain.toString(max));
}
}
@Override
@ -49,7 +65,12 @@ abstract class IntegerParam extends Param<Integer, IntegerParam.Domain> {
@Override
Integer parse(final String str) {
return NULL.equals(str)? null: Integer.parseInt(str, radix);
try{
return NULL.equals(str)? null: Integer.parseInt(str, radix);
} catch(NumberFormatException e) {
throw new IllegalArgumentException("Failed to parse \"" + str
+ "\" as a radix-" + radix + " integer.", e);
}
}
/** Convert an Integer to a String. */

View File

@ -31,7 +31,7 @@ public class LengthParam extends LongParam {
* @param value the parameter value.
*/
public LengthParam(final Long value) {
super(DOMAIN, value);
super(DOMAIN, value, 0L, null);
}
/**

View File

@ -19,8 +19,23 @@ package org.apache.hadoop.hdfs.web.resources;
/** Long parameter. */
abstract class LongParam extends Param<Long, LongParam.Domain> {
LongParam(final Domain domain, final Long value) {
LongParam(final Domain domain, final Long value, final Long min, final Long max) {
super(domain, value);
checkRange(min, max);
}
private void checkRange(final Long min, final Long max) {
if (value == null) {
return;
}
if (min != null && value < min) {
throw new IllegalArgumentException("Invalid parameter range: " + getName()
+ " = " + domain.toString(value) + " < " + domain.toString(min));
}
if (max != null && value > max) {
throw new IllegalArgumentException("Invalid parameter range: " + getName()
+ " = " + domain.toString(value) + " > " + domain.toString(max));
}
}
@Override
@ -49,7 +64,12 @@ abstract class LongParam extends Param<Long, LongParam.Domain> {
@Override
Long parse(final String str) {
return NULL.equals(str)? null: Long.parseLong(str, radix);
try {
return NULL.equals(str)? null: Long.parseLong(str, radix);
} catch(NumberFormatException e) {
throw new IllegalArgumentException("Failed to parse \"" + str
+ "\" as a radix-" + radix + " long integer.", e);
}
}
/** Convert a Short to a String. */

View File

@ -31,7 +31,7 @@ public class ModificationTimeParam extends LongParam {
* @param value the parameter value.
*/
public ModificationTimeParam(final Long value) {
super(DOMAIN, value);
super(DOMAIN, value, -1L, null);
}
/**

View File

@ -31,7 +31,7 @@ public class OffsetParam extends LongParam {
* @param value the parameter value.
*/
public OffsetParam(final Long value) {
super(DOMAIN, value);
super(DOMAIN, value, 0L, null);
}
/**

View File

@ -28,12 +28,14 @@ public class PermissionParam extends ShortParam {
private static final Domain DOMAIN = new Domain(NAME, 8);
private static final short DEFAULT_PERMISSION = 0755;
/**
* Constructor.
* @param value the parameter value.
*/
public PermissionParam(final FsPermission value) {
super(DOMAIN, value == null? null: value.toShort());
super(DOMAIN, value == null? null: value.toShort(), null, null);
}
/**
@ -41,7 +43,7 @@ public class PermissionParam extends ShortParam {
* @param str a string representation of the parameter value.
*/
public PermissionParam(final String str) {
super(DOMAIN, DOMAIN.parse(str));
super(DOMAIN, DOMAIN.parse(str), (short)0, (short)01777);
}
@Override
@ -51,7 +53,7 @@ public class PermissionParam extends ShortParam {
/** @return the represented FsPermission. */
public FsPermission getFsPermission() {
final Short mode = getValue();
return mode == null? FsPermission.getDefault(): new FsPermission(mode);
final Short v = getValue();
return new FsPermission(v != null? v: DEFAULT_PERMISSION);
}
}

View File

@ -36,7 +36,7 @@ public class ReplicationParam extends ShortParam {
* @param value the parameter value.
*/
public ReplicationParam(final Short value) {
super(DOMAIN, value);
super(DOMAIN, value, (short)1, null);
}
/**

View File

@ -19,8 +19,24 @@ package org.apache.hadoop.hdfs.web.resources;
/** Short parameter. */
abstract class ShortParam extends Param<Short, ShortParam.Domain> {
ShortParam(final Domain domain, final Short value) {
ShortParam(final Domain domain, final Short value,
final Short min, final Short max) {
super(domain, value);
checkRange(min, max);
}
private void checkRange(final Short min, final Short max) {
if (value == null) {
return;
}
if (min != null && value < min) {
throw new IllegalArgumentException("Invalid parameter range: " + getName()
+ " = " + domain.toString(value) + " < " + domain.toString(min));
}
if (max != null && value > max) {
throw new IllegalArgumentException("Invalid parameter range: " + getName()
+ " = " + domain.toString(value) + " > " + domain.toString(max));
}
}
@Override
@ -49,7 +65,12 @@ abstract class ShortParam extends Param<Short, ShortParam.Domain> {
@Override
Short parse(final String str) {
return NULL.equals(str)? null: Short.parseShort(str, radix);
try {
return NULL.equals(str)? null: Short.parseShort(str, radix);
} catch(NumberFormatException e) {
throw new IllegalArgumentException("Failed to parse \"" + str
+ "\" as a radix-" + radix + " short integer.", e);
}
}
/** Convert a Short to a String. */

View File

@ -0,0 +1,227 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.web.resources;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.junit.Assert;
import org.junit.Test;
/**
 * Unit tests for the webhdfs request-parameter classes: verifies each
 * parameter's default value and that out-of-range or malformed values are
 * rejected with {@link IllegalArgumentException}.
 */
public class TestParam {
  public static final Log LOG = LogFactory.getLog(TestParam.class);

  final Configuration conf = new Configuration();

  @Test
  public void testAccessTimeParam() {
    // Default is -1L, meaning "access time not specified".
    final AccessTimeParam defaultParam = new AccessTimeParam(AccessTimeParam.DEFAULT);
    Assert.assertEquals(-1L, defaultParam.getValue().longValue());

    // -1 is the lowest legal value; anything smaller must be rejected.
    new AccessTimeParam(-1L);

    try {
      new AccessTimeParam(-2L);
      Assert.fail();
    } catch (IllegalArgumentException expected) {
      LOG.info("EXPECTED: " + expected);
    }
  }

  @Test
  public void testBlockSizeParam() {
    // Default is null; the effective value then comes from the configuration.
    final BlockSizeParam defaultParam = new BlockSizeParam(BlockSizeParam.DEFAULT);
    Assert.assertEquals(null, defaultParam.getValue());
    Assert.assertEquals(
        conf.getLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY,
            DFSConfigKeys.DFS_BLOCK_SIZE_DEFAULT),
        defaultParam.getValue(conf));

    // Block size must be at least 1.
    new BlockSizeParam(1L);

    try {
      new BlockSizeParam(0L);
      Assert.fail();
    } catch (IllegalArgumentException expected) {
      LOG.info("EXPECTED: " + expected);
    }
  }

  @Test
  public void testBufferSizeParam() {
    // Default is null; the effective value then comes from the configuration.
    final BufferSizeParam defaultParam = new BufferSizeParam(BufferSizeParam.DEFAULT);
    Assert.assertEquals(null, defaultParam.getValue());
    Assert.assertEquals(
        conf.getInt(CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY,
            CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT),
        defaultParam.getValue(conf));

    // Buffer size must be at least 1.
    new BufferSizeParam(1);

    try {
      new BufferSizeParam(0);
      Assert.fail();
    } catch (IllegalArgumentException expected) {
      LOG.info("EXPECTED: " + expected);
    }
  }

  @Test
  public void testDelegationParam() {
    // No validation beyond the default: an empty token maps to null.
    final DelegationParam defaultParam = new DelegationParam(DelegationParam.DEFAULT);
    Assert.assertEquals(null, defaultParam.getValue());
  }

  @Test
  public void testDestinationParam() {
    // Empty default maps to null; a destination must be an absolute path.
    final DestinationParam defaultParam = new DestinationParam(DestinationParam.DEFAULT);
    Assert.assertEquals(null, defaultParam.getValue());

    new DestinationParam("/abc");

    try {
      new DestinationParam("abc");
      Assert.fail();
    } catch (IllegalArgumentException expected) {
      LOG.info("EXPECTED: " + expected);
    }
  }

  @Test
  public void testGroupParam() {
    // No range validation: the empty default maps to null.
    final GroupParam defaultParam = new GroupParam(GroupParam.DEFAULT);
    Assert.assertEquals(null, defaultParam.getValue());
  }

  @Test
  public void testModificationTimeParam() {
    // Default is -1L, meaning "modification time not specified".
    final ModificationTimeParam defaultParam =
        new ModificationTimeParam(ModificationTimeParam.DEFAULT);
    Assert.assertEquals(-1L, defaultParam.getValue().longValue());

    // -1 is the lowest legal value; anything smaller must be rejected.
    new ModificationTimeParam(-1L);

    try {
      new ModificationTimeParam(-2L);
      Assert.fail();
    } catch (IllegalArgumentException expected) {
      LOG.info("EXPECTED: " + expected);
    }
  }

  @Test
  public void testOverwriteParam() {
    // Boolean parsing is case-insensitive; anything else is rejected.
    final OverwriteParam defaultParam = new OverwriteParam(OverwriteParam.DEFAULT);
    Assert.assertEquals(false, defaultParam.getValue());

    new OverwriteParam("trUe");

    try {
      new OverwriteParam("abc");
      Assert.fail();
    } catch (IllegalArgumentException expected) {
      LOG.info("EXPECTED: " + expected);
    }
  }

  @Test
  public void testOwnerParam() {
    // No range validation: the empty default maps to null.
    final OwnerParam defaultParam = new OwnerParam(OwnerParam.DEFAULT);
    Assert.assertEquals(null, defaultParam.getValue());
  }

  @Test
  public void testPermissionParam() {
    // Unspecified permission falls back to the 0755 default.
    final PermissionParam defaultParam = new PermissionParam(PermissionParam.DEFAULT);
    Assert.assertEquals(new FsPermission((short)0755), defaultParam.getFsPermission());

    // Legal octal range is [0, 1777]; non-octal input is rejected too.
    new PermissionParam("0");

    try {
      new PermissionParam("-1");
      Assert.fail();
    } catch (IllegalArgumentException expected) {
      LOG.info("EXPECTED: " + expected);
    }

    new PermissionParam("1777");

    try {
      new PermissionParam("2000");
      Assert.fail();
    } catch (IllegalArgumentException expected) {
      LOG.info("EXPECTED: " + expected);
    }

    try {
      new PermissionParam("8");
      Assert.fail();
    } catch (IllegalArgumentException expected) {
      LOG.info("EXPECTED: " + expected);
    }

    try {
      new PermissionParam("abc");
      Assert.fail();
    } catch (IllegalArgumentException expected) {
      LOG.info("EXPECTED: " + expected);
    }
  }

  @Test
  public void testRecursiveParam() {
    // Boolean parsing is case-insensitive; anything else is rejected.
    final RecursiveParam defaultParam = new RecursiveParam(RecursiveParam.DEFAULT);
    Assert.assertEquals(false, defaultParam.getValue());

    new RecursiveParam("falSe");

    try {
      new RecursiveParam("abc");
      Assert.fail();
    } catch (IllegalArgumentException expected) {
      LOG.info("EXPECTED: " + expected);
    }
  }

  @Test
  public void testRenewerParam() {
    // No range validation: the empty default maps to null.
    final RenewerParam defaultParam = new RenewerParam(RenewerParam.DEFAULT);
    Assert.assertEquals(null, defaultParam.getValue());
  }

  @Test
  public void testReplicationParam() {
    // Default is null; the effective value then comes from the configuration.
    final ReplicationParam defaultParam = new ReplicationParam(ReplicationParam.DEFAULT);
    Assert.assertEquals(null, defaultParam.getValue());
    Assert.assertEquals(
        (short)conf.getInt(DFSConfigKeys.DFS_REPLICATION_KEY,
            DFSConfigKeys.DFS_REPLICATION_DEFAULT),
        defaultParam.getValue(conf));

    // Replication must be at least 1.
    new ReplicationParam((short)1);

    try {
      new ReplicationParam((short)0);
      Assert.fail();
    } catch (IllegalArgumentException expected) {
      LOG.info("EXPECTED: " + expected);
    }
  }
}