svn merge -c 1461040 from trunk for HDFS-4598. Fix the default value of ConcatSourcesParam and the WebHDFS doc.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1461042 13f79535-47bb-0310-9956-ffa450edef68
Tsz-wo Sze 2013-03-26 09:57:19 +00:00
parent 13aa009480
commit 17a304973c
6 changed files with 80 additions and 29 deletions

hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

@@ -106,6 +106,9 @@ Release 2.0.5-beta - UNRELEASED
     HDFS-4609. TestAuditLogs should release log handles between tests.
     (Ivan Mitic via szetszwo)
 
+    HDFS-4598. Fix the default value of ConcatSourcesParam and the WebHDFS doc.
+    (szetszwo)
+
 Release 2.0.4-alpha - UNRELEASED
 
   INCOMPATIBLE CHANGES

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java

@@ -312,13 +312,14 @@ public class DistributedFileSystem extends FileSystem {
   }
 
   /**
-   * Move blocks from srcs to trg
-   * and delete srcs afterwards
-   * RESTRICTION: all blocks should be the same size
+   * Move blocks from srcs to trg and delete srcs afterwards.
+   * The file block sizes must be the same.
+   *
    * @param trg existing file to append to
    * @param psrcs list of files (same block size, same replication)
    * @throws IOException
    */
+  @Override
   public void concat(Path trg, Path [] psrcs) throws IOException {
     String [] srcs = new String [psrcs.length];
     for(int i=0; i<psrcs.length; i++) {
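As an aside, the contract this Javadoc describes can be exercised as in the minimal sketch below; the paths and the cluster behind fs.defaultFS are assumptions, and every source must already match the target's block size and replication:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;

  public class ConcatSketch {
    public static void main(String[] args) throws Exception {
      // Assumes fs.defaultFS resolves to an HDFS cluster; concat is
      // not supported by the local file system.
      FileSystem fs = FileSystem.get(new Configuration());
      Path target = new Path("/user/example/part-0");    // existing file
      Path[] sources = { new Path("/user/example/part-1"),
                         new Path("/user/example/part-2") };
      fs.concat(target, sources);  // blocks move onto target; sources are deleted
    }
  }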

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java

@@ -30,7 +30,6 @@ import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
 import java.security.PrivilegedExceptionAction;
-import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
 import java.util.Map;
@@ -64,7 +63,33 @@ import org.apache.hadoop.hdfs.protocol.UnresolvedPathException;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenSelector;
 import org.apache.hadoop.hdfs.server.namenode.SafeModeException;
-import org.apache.hadoop.hdfs.web.resources.*;
+import org.apache.hadoop.hdfs.web.resources.AccessTimeParam;
+import org.apache.hadoop.hdfs.web.resources.BlockSizeParam;
+import org.apache.hadoop.hdfs.web.resources.BufferSizeParam;
+import org.apache.hadoop.hdfs.web.resources.ConcatSourcesParam;
+import org.apache.hadoop.hdfs.web.resources.CreateParentParam;
+import org.apache.hadoop.hdfs.web.resources.DelegationParam;
+import org.apache.hadoop.hdfs.web.resources.DeleteOpParam;
+import org.apache.hadoop.hdfs.web.resources.DestinationParam;
+import org.apache.hadoop.hdfs.web.resources.DoAsParam;
+import org.apache.hadoop.hdfs.web.resources.GetOpParam;
+import org.apache.hadoop.hdfs.web.resources.GroupParam;
+import org.apache.hadoop.hdfs.web.resources.HttpOpParam;
+import org.apache.hadoop.hdfs.web.resources.LengthParam;
+import org.apache.hadoop.hdfs.web.resources.ModificationTimeParam;
+import org.apache.hadoop.hdfs.web.resources.OffsetParam;
+import org.apache.hadoop.hdfs.web.resources.OverwriteParam;
+import org.apache.hadoop.hdfs.web.resources.OwnerParam;
+import org.apache.hadoop.hdfs.web.resources.Param;
+import org.apache.hadoop.hdfs.web.resources.PermissionParam;
+import org.apache.hadoop.hdfs.web.resources.PostOpParam;
+import org.apache.hadoop.hdfs.web.resources.PutOpParam;
+import org.apache.hadoop.hdfs.web.resources.RecursiveParam;
+import org.apache.hadoop.hdfs.web.resources.RenameOptionSetParam;
+import org.apache.hadoop.hdfs.web.resources.RenewerParam;
+import org.apache.hadoop.hdfs.web.resources.ReplicationParam;
+import org.apache.hadoop.hdfs.web.resources.TokenArgumentParam;
+import org.apache.hadoop.hdfs.web.resources.UserParam;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.io.retry.RetryUtils;
@@ -82,7 +107,6 @@ import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.security.token.TokenRenewer;
 import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSelector;
 import org.apache.hadoop.util.Progressable;
-import org.apache.hadoop.util.StringUtils;
 import org.mortbay.util.ajax.JSON;
 
 import com.google.common.annotations.VisibleForTesting;
@@ -729,17 +753,10 @@ public class WebHdfsFileSystem extends FileSystem
   }
 
   @Override
-  public void concat(final Path trg, final Path [] psrcs) throws IOException {
+  public void concat(final Path trg, final Path [] srcs) throws IOException {
     statistics.incrementWriteOps(1);
     final HttpOpParam.Op op = PostOpParam.Op.CONCAT;
 
-    List<String> strPaths = new ArrayList<String>(psrcs.length);
-    for(Path psrc : psrcs) {
-      strPaths.add(psrc.toUri().getPath());
-    }
-    String srcs = StringUtils.join(",", strPaths);
-
     ConcatSourcesParam param = new ConcatSourcesParam(srcs);
     run(op, trg, param);
   }
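The net effect of this hunk: WebHdfsFileSystem no longer flattens the source paths itself; the joining moves into the new Path[] constructor of ConcatSourcesParam (next file). A minimal sketch of the resulting parameter value, with hypothetical paths:

  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.hdfs.web.resources.ConcatSourcesParam;

  public class SourcesParamSketch {
    public static void main(String[] args) {
      // Stands in for the removed StringUtils.join(",", strPaths) loop.
      Path[] srcs = { new Path("/a"), new Path("/b") };
      ConcatSourcesParam param = new ConcatSourcesParam(srcs);
      System.out.println(param.getValue());  // prints "/a,/b"
      // WebHDFS sends this as ...?op=CONCAT&sources=/a,/b
    }
  }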

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ConcatSourcesParam.java

@@ -18,15 +18,28 @@
  */
 package org.apache.hadoop.hdfs.web.resources;
 
+import org.apache.hadoop.fs.Path;
+
 /** The concat source paths parameter. */
 public class ConcatSourcesParam extends StringParam {
   /** Parameter name. */
   public static final String NAME = "sources";
 
-  public static final String DEFAULT = NULL;
+  public static final String DEFAULT = "";
 
   private static final Domain DOMAIN = new Domain(NAME, null);
 
+  private static String paths2String(Path[] paths) {
+    if (paths == null || paths.length == 0) {
+      return "";
+    }
+    final StringBuilder b = new StringBuilder(paths[0].toUri().getPath());
+    for(int i = 1; i < paths.length; i++) {
+      b.append(',').append(paths[i].toUri().getPath());
+    }
+    return b.toString();
+  }
+
   /**
    * Constructor.
    * @param str a string representation of the parameter value.
@@ -35,6 +48,10 @@ public class ConcatSourcesParam extends StringParam {
     super(DOMAIN, str);
   }
 
+  public ConcatSourcesParam(Path[] paths) {
+    this(paths2String(paths));
+  }
+
   @Override
   public String getName() {
     return NAME;
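The actual bug named in the commit message is the DEFAULT constant: it previously aliased the NULL sentinel inherited from Param rather than the empty string, so an absent sources parameter did not agree with what paths2String produces for an empty array. A short sketch of the new round-trip behaviour, with hypothetical paths:

  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.hdfs.web.resources.ConcatSourcesParam;

  public class DefaultSketch {
    public static void main(String[] args) {
      // An empty source list now serializes to "", matching the new DEFAULT.
      System.out.println(new ConcatSourcesParam(new Path[0]).getValue().isEmpty());
      // Non-empty lists join the URI paths with commas.
      Path[] paths = { new Path("/foo"), new Path("/bar") };
      System.out.println(new ConcatSourcesParam(paths).getValue());  // "/foo,/bar"
    }
  }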

hadoop-hdfs-project/hadoop-hdfs/src/site/apt/WebHDFS.apt.vm

@@ -109,7 +109,7 @@ WebHDFS REST API
   * {{{Append to a File}<<<APPEND>>>}}
         (see {{{../../api/org/apache/hadoop/fs/FileSystem.html}FileSystem}}.append)
 
-  * {{{Concat File(s)}<<<CONCAT>>>}}
+  * {{{Concatenate Files}<<<CONCAT>>>}}
         (see {{{../../api/org/apache/hadoop/fs/FileSystem.html}FileSystem}}.concat)
 
   * HTTP DELETE
@@ -307,7 +307,7 @@ Content-Length: 0
   * Submit a HTTP POST request.
 
 +---------------------------------
-curl -i -X POST "http://<HOST>:<PORT>/webhdfs/v1/<PATH>?op=CONCAT&sources=<SOURCES>"
+curl -i -X POST "http://<HOST>:<PORT>/webhdfs/v1/<PATH>?op=CONCAT&sources=<PATHS>"
 +---------------------------------
 
   The client receives a response with zero content length:
@@ -319,10 +319,6 @@ Content-Length: 0
   []
 
-  This REST API call is available as of Hadoop version 2.0.3.
-  Please note that <SOURCES> is a comma seperated list of absolute paths.
-  (Example: sources=/test/file1,/test/file2,/test/file3)
-
   See also:
   {{{Sources}<<<sources>>>}},
   {{{../../api/org/apache/hadoop/fs/FileSystem.html}FileSystem}}.concat
@@ -1761,7 +1757,7 @@ var tokenProperties =
 *----------------+-------------------------------------------------------------------+
 || Name | <<<sources>>> |
 *----------------+-------------------------------------------------------------------+
-|| Description | The comma seperated absolute paths used for concatenation. |
+|| Description | A list of source paths. |
 *----------------+-------------------------------------------------------------------+
 || Type | String |
 *----------------+-------------------------------------------------------------------+
@@ -1769,12 +1765,9 @@ var tokenProperties =
 *----------------+-------------------------------------------------------------------+
 || Valid Values | A list of comma seperated absolute FileSystem paths without scheme and authority. |
 *----------------+-------------------------------------------------------------------+
-|| Syntax | See the note in {{Delegation}}. |
+|| Syntax | Any string. |
 *----------------+-------------------------------------------------------------------+
 
-  <<Note>> that sources are absolute FileSystem paths.
-
   See also:
   {{{Concat File(s)}<<<CONCAT>>>}}

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/resources/TestParam.java

@@ -17,18 +17,22 @@
  */
 package org.apache.hadoop.hdfs.web.resources;
 
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+
+import java.util.Arrays;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.util.StringUtils;
 import org.junit.Assert;
 import org.junit.Test;
 
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-
 public class TestParam {
   public static final Log LOG = LogFactory.getLog(TestParam.class);
 
@@ -265,4 +269,20 @@ public class TestParam {
     UserParam userParam = new UserParam("a$");
     assertNotNull(userParam.getValue());
   }
+
+  @Test
+  public void testConcatSourcesParam() {
+    final String[] strings = {"/", "/foo", "/bar"};
+    for(int n = 0; n < strings.length; n++) {
+      final String[] sub = new String[n];
+      final Path[] paths = new Path[n];
+      for(int i = 0; i < paths.length; i++) {
+        paths[i] = new Path(sub[i] = strings[i]);
+      }
+
+      final String expected = StringUtils.join(",", Arrays.asList(sub));
+      final ConcatSourcesParam computed = new ConcatSourcesParam(paths);
+      Assert.assertEquals(expected, computed.getValue());
+    }
+  }
 }