HDFS-2594. Support getDelegationTokens and createSymlink in WebHDFS.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1212299 13f79535-47bb-0310-9956-ffa450edef68
Tsz-wo Sze 2011-12-09 09:26:33 +00:00
parent b4afbe6c8d
commit 9b1f47226b
11 changed files with 378 additions and 46 deletions

CHANGES.txt

@@ -151,6 +151,9 @@ Release 0.23.1 - UNRELEASED
     HDFS-2316. [umbrella] WebHDFS: a complete FileSystem implementation for
     accessing HDFS over HTTP (szetszwo)

+    HDFS-2594. Support getDelegationTokens and createSymlink in WebHDFS.
+    (szetszwo)
+
   IMPROVEMENTS

     HDFS-2560. Refactor BPOfferService to be a static inner class (todd)

NamenodeWebHdfsMethods.java

@@ -66,6 +66,7 @@ import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
 import org.apache.hadoop.hdfs.web.resources.AccessTimeParam;
 import org.apache.hadoop.hdfs.web.resources.BlockSizeParam;
 import org.apache.hadoop.hdfs.web.resources.BufferSizeParam;
+import org.apache.hadoop.hdfs.web.resources.CreateParentParam;
 import org.apache.hadoop.hdfs.web.resources.DelegationParam;
 import org.apache.hadoop.hdfs.web.resources.DeleteOpParam;
 import org.apache.hadoop.hdfs.web.resources.DestinationParam;
@@ -245,12 +246,14 @@ public class NamenodeWebHdfsMethods {
           final AccessTimeParam accessTime,
       @QueryParam(RenameOptionSetParam.NAME) @DefaultValue(RenameOptionSetParam.DEFAULT)
           final RenameOptionSetParam renameOptions,
+      @QueryParam(CreateParentParam.NAME) @DefaultValue(CreateParentParam.DEFAULT)
+          final CreateParentParam createParent,
       @QueryParam(TokenArgumentParam.NAME) @DefaultValue(TokenArgumentParam.DEFAULT)
           final TokenArgumentParam delegationTokenArgument
       ) throws IOException, InterruptedException {
     return put(ugi, delegation, username, doAsUser, ROOT, op, destination,
         owner, group, permission, overwrite, bufferSize, replication,
-        blockSize, modificationTime, accessTime, renameOptions,
+        blockSize, modificationTime, accessTime, renameOptions, createParent,
         delegationTokenArgument);
   }
@@ -292,6 +295,8 @@ public class NamenodeWebHdfsMethods {
           final AccessTimeParam accessTime,
       @QueryParam(RenameOptionSetParam.NAME) @DefaultValue(RenameOptionSetParam.DEFAULT)
          final RenameOptionSetParam renameOptions,
+      @QueryParam(CreateParentParam.NAME) @DefaultValue(CreateParentParam.DEFAULT)
+          final CreateParentParam createParent,
       @QueryParam(TokenArgumentParam.NAME) @DefaultValue(TokenArgumentParam.DEFAULT)
           final TokenArgumentParam delegationTokenArgument
       ) throws IOException, InterruptedException {
@@ -325,6 +330,12 @@ public class NamenodeWebHdfsMethods {
       final String js = JsonUtil.toJsonString("boolean", b);
       return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
     }
+    case CREATESYMLINK:
+    {
+      np.createSymlink(destination.getValue(), fullpath,
+          PermissionParam.getDefaultFsPermission(), createParent.getValue());
+      return Response.ok().type(MediaType.APPLICATION_JSON).build();
+    }
     case RENAME:
     {
       final EnumSet<Options.Rename> s = renameOptions.getValue();
@@ -578,6 +589,17 @@ public class NamenodeWebHdfsMethods {
       final String js = JsonUtil.toJsonString(token);
       return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
     }
+    case GETDELEGATIONTOKENS:
+    {
+      if (delegation.getValue() != null) {
+        throw new IllegalArgumentException(delegation.getName()
+            + " parameter is not null.");
+      }
+      final Token<? extends TokenIdentifier>[] tokens = new Token<?>[1];
+      tokens[0] = generateDelegationToken(namenode, ugi, renewer.getValue());
+      final String js = JsonUtil.toJsonString(tokens);
+      return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
+    }
     case GETHOMEDIRECTORY:
     {
       final String js = JsonUtil.toJsonString(

JsonUtil.java

@@ -97,6 +97,59 @@ public class JsonUtil {
     return (Token<BlockTokenIdentifier>)toToken(m);
   }

+  /** Convert a Token[] to a JSON array. */
+  private static Object[] toJsonArray(final Token<? extends TokenIdentifier>[] array
+      ) throws IOException {
+    if (array == null) {
+      return null;
+    } else if (array.length == 0) {
+      return EMPTY_OBJECT_ARRAY;
+    } else {
+      final Object[] a = new Object[array.length];
+      for(int i = 0; i < array.length; i++) {
+        a[i] = toJsonMap(array[i]);
+      }
+      return a;
+    }
+  }
+
+  /** Convert a token object to a JSON string. */
+  public static String toJsonString(final Token<? extends TokenIdentifier>[] tokens
+      ) throws IOException {
+    if (tokens == null) {
+      return null;
+    }
+    final Map<String, Object> m = new TreeMap<String, Object>();
+    m.put(Token.class.getSimpleName(), toJsonArray(tokens));
+    return toJsonString(Token.class.getSimpleName() + "s", m);
+  }
+
+  /** Convert an Object[] to a List<Token<?>>. */
+  private static List<Token<?>> toTokenList(final Object[] objects) throws IOException {
+    if (objects == null) {
+      return null;
+    } else if (objects.length == 0) {
+      return Collections.emptyList();
+    } else {
+      final List<Token<?>> list = new ArrayList<Token<?>>(objects.length);
+      for(int i = 0; i < objects.length; i++) {
+        list.add(toToken((Map<?, ?>)objects[i]));
+      }
+      return list;
+    }
+  }
+
+  /** Convert a JSON map to a List<Token<?>>. */
+  public static List<Token<?>> toTokenList(final Map<?, ?> json) throws IOException {
+    if (json == null) {
+      return null;
+    }
+    final Map<?, ?> m = (Map<?, ?>)json.get(Token.class.getSimpleName() + "s");
+    return toTokenList((Object[])m.get(Token.class.getSimpleName()));
+  }
+
   /** Convert an exception object to a Json string. */
   public static String toJsonString(final Exception e) {
     final Map<String, Object> m = new TreeMap<String, Object>();
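Read together, the new toJsonString(Token[]) and toTokenList(Map) helpers fix the wire
shape used by GETDELEGATIONTOKENS: a top-level "Tokens" object wrapping a "Token" array
of {"urlString": ...} entries. A minimal standalone sketch of that nesting, using plain
JDK maps in place of JsonUtil's serializer (the "KAAKSm9i..." value is a placeholder
taken from the documentation example below, not a real token):

    import java.util.Collections;
    import java.util.Map;
    import java.util.TreeMap;

    public class TokensJsonShape {
      public static void main(String[] args) {
        // One {"urlString": ...} entry per token, as built by toJsonMap(Token).
        final Map<String, Object> token = new TreeMap<String, Object>();
        token.put("urlString", "KAAKSm9i...");

        // toJsonString(Token[]) puts the array under the key "Token"
        // (JsonUtil itself stores an Object[]; a List is used here so the
        // printed output is readable) ...
        final Map<String, Object> inner = new TreeMap<String, Object>();
        inner.put("Token", Collections.singletonList(token));

        // ... and wraps the whole map under "Tokens"; toTokenList(Map) walks
        // the same two levels in reverse.
        final Map<String, Object> outer = new TreeMap<String, Object>();
        outer.put("Tokens", inner);

        System.out.println(outer); // {Tokens={Token=[{urlString=KAAKSm9i...}]}}
      }
    }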

WebHdfsFileSystem.java

@@ -29,7 +29,6 @@ import java.net.MalformedURLException;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
-import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 import java.util.StringTokenizer;
@@ -64,6 +63,7 @@ import org.apache.hadoop.hdfs.server.namenode.SafeModeException;
 import org.apache.hadoop.hdfs.web.resources.AccessTimeParam;
 import org.apache.hadoop.hdfs.web.resources.BlockSizeParam;
 import org.apache.hadoop.hdfs.web.resources.BufferSizeParam;
+import org.apache.hadoop.hdfs.web.resources.CreateParentParam;
 import org.apache.hadoop.hdfs.web.resources.DeleteOpParam;
 import org.apache.hadoop.hdfs.web.resources.DestinationParam;
 import org.apache.hadoop.hdfs.web.resources.GetOpParam;
@@ -318,8 +318,9 @@ public class WebHdfsFileSystem extends FileSystem
         + '&' + new UserParam(ugi)
         + Param.toSortedString("&", parameters);
     final URL url;
-    if (op.equals(PutOpParam.Op.RENEWDELEGATIONTOKEN)
-        || op.equals(GetOpParam.Op.GETDELEGATIONTOKEN)) {
+    if (op == PutOpParam.Op.RENEWDELEGATIONTOKEN
+        || op == GetOpParam.Op.GETDELEGATIONTOKEN
+        || op == GetOpParam.Op.GETDELEGATIONTOKENS) {
       // Skip adding delegation token for getting or renewing delegation token,
       // because these operations require kerberos authentication.
       url = getNamenodeURL(path, query);
@@ -458,6 +459,18 @@ public class WebHdfsFileSystem extends FileSystem
     return (Boolean)json.get("boolean");
   }

+  /**
+   * Create a symlink pointing to the destination path.
+   * @see org.apache.hadoop.fs.Hdfs#createSymlink(Path, Path, boolean)
+   */
+  public void createSymlink(Path destination, Path f, boolean createParent
+      ) throws IOException {
+    statistics.incrementWriteOps(1);
+    final HttpOpParam.Op op = PutOpParam.Op.CREATESYMLINK;
+    run(op, f, new DestinationParam(makeQualified(destination).toUri().getPath()),
+        new CreateParentParam(createParent));
+  }
+
   @Override
   public boolean rename(final Path src, final Path dst) throws IOException {
     statistics.incrementWriteOps(1);
@@ -703,8 +716,13 @@ public class WebHdfsFileSystem extends FileSystem
   @Override
   public List<Token<?>> getDelegationTokens(final String renewer
       ) throws IOException {
-    final Token<?>[] t = {getDelegationToken(renewer)};
-    return Arrays.asList(t);
+    final HttpOpParam.Op op = GetOpParam.Op.GETDELEGATIONTOKENS;
+    final Map<?, ?> m = run(op, null, new RenewerParam(renewer));
+    final List<Token<?>> tokens = JsonUtil.toTokenList(m);
+    for(Token<?> t : tokens) {
+      SecurityUtil.setTokenService(t, nnAddr);
+    }
+    return tokens;
   }

   @Override
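For orientation, a minimal client-side sketch of the two methods this file gains. It is
illustrative only: the webhdfs://namenode:50070 URI, the paths, and the "JobTracker"
renewer are placeholder values, not anything fixed by this commit.

    import java.net.URI;
    import java.util.List;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
    import org.apache.hadoop.security.token.Token;

    public class WebHdfsNewOps {
      public static void main(String[] args) throws Exception {
        // Assumes a NameNode with dfs.webhdfs.enabled=true reachable at namenode:50070.
        final WebHdfsFileSystem webhdfs = (WebHdfsFileSystem) FileSystem.get(
            URI.create("webhdfs://namenode:50070"), new Configuration());

        // New in this change: create a symlink over HTTP, without creating
        // missing parent directories (createParent = false).
        webhdfs.createSymlink(new Path("/user/alice/data"),
            new Path("/user/alice/link"), false);

        // New in this change: fetch delegation tokens in a single
        // GETDELEGATIONTOKENS round trip instead of wrapping getDelegationToken.
        final List<Token<?>> tokens = webhdfs.getDelegationTokens("JobTracker");
        for (Token<?> t : tokens) {
          System.out.println(t.getService() + ": " + t.encodeToUrlString());
        }
      }
    }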

CreateParentParam.java (new file)

@ -0,0 +1,49 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.web.resources;
/** Create Parent parameter. */
public class CreateParentParam extends BooleanParam {
/** Parameter name. */
public static final String NAME = "createparent";
/** Default parameter value. */
public static final String DEFAULT = FALSE;
private static final Domain DOMAIN = new Domain(NAME);
/**
* Constructor.
* @param value the parameter value.
*/
public CreateParentParam(final Boolean value) {
super(DOMAIN, value);
}
/**
* Constructor.
* @param str a string representation of the parameter value.
*/
public CreateParentParam(final String str) {
this(DOMAIN.parse(str));
}
@Override
public String getName() {
return NAME;
}
}
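A quick sketch of the parameter's expected behavior. The parsing semantics are inherited
from BooleanParam's Domain, which is not shown in this diff, so treat this as an
illustration rather than a verified test:

    // ?createparent=true in a query string parses to Boolean.TRUE ...
    CreateParentParam p = new CreateParentParam("true");
    System.out.println(p.getName() + " = " + p.getValue());  // createparent = true

    // ... while an absent parameter falls back to DEFAULT, which is FALSE,
    // matching the conservative choice of not creating missing directories.
    CreateParentParam d = new CreateParentParam(CreateParentParam.DEFAULT);
    System.out.println(d.getValue());  // false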

GetOpParam.java

@@ -32,6 +32,7 @@ public class GetOpParam extends HttpOpParam<GetOpParam.Op> {
     GETHOMEDIRECTORY(HttpURLConnection.HTTP_OK),
     GETDELEGATIONTOKEN(HttpURLConnection.HTTP_OK),
+    GETDELEGATIONTOKENS(HttpURLConnection.HTTP_OK),

     /** GET_BLOCK_LOCATIONS is a private unstable op. */
     GET_BLOCK_LOCATIONS(HttpURLConnection.HTTP_OK),

PermissionParam.java

@@ -29,6 +29,11 @@ public class PermissionParam extends ShortParam {
   private static final Domain DOMAIN = new Domain(NAME, 8);

   private static final short DEFAULT_PERMISSION = 0755;

+  /** @return the default FsPermission. */
+  public static FsPermission getDefaultFsPermission() {
+    return new FsPermission(DEFAULT_PERMISSION);
+  }
+
   /**
    * Constructor.

PutOpParam.java

@@ -26,6 +26,7 @@ public class PutOpParam extends HttpOpParam<PutOpParam.Op> {
     CREATE(true, HttpURLConnection.HTTP_CREATED),
     MKDIRS(false, HttpURLConnection.HTTP_OK),
+    CREATESYMLINK(false, HttpURLConnection.HTTP_OK),
     RENAME(false, HttpURLConnection.HTTP_OK),
     SETREPLICATION(false, HttpURLConnection.HTTP_OK),

TestFcHdfsSymlink.java

@ -17,29 +17,28 @@
*/ */
package org.apache.hadoop.fs; package org.apache.hadoop.fs;
import java.io.*; import static org.apache.hadoop.fs.FileContextTestHelper.getAbsoluteTestRootDir;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.net.URI; import java.net.URI;
import org.apache.commons.logging.impl.Log4JLogger; import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.log4j.Level;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.server.namenode.NameNode;
import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import static org.apache.hadoop.fs.FileContextTestHelper.*; import org.apache.hadoop.hdfs.server.namenode.NameNode;
import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
import org.apache.hadoop.hdfs.web.WebHdfsTestUtil;
import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.ipc.RemoteException;
import org.apache.log4j.Level;
import static org.junit.Assert.*;
import org.junit.Test;
import org.junit.BeforeClass;
import org.junit.AfterClass; import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
/** /**
* Test symbolic links using FileContext and Hdfs. * Test symbolic links using FileContext and Hdfs.
@@ -51,6 +50,8 @@ public class TestFcHdfsSymlink extends FileContextSymlinkBaseTest {
   }

   private static MiniDFSCluster cluster;
+  private static WebHdfsFileSystem webhdfs;

   protected String getScheme() {
     return "hdfs";
@@ -79,10 +80,11 @@ public class TestFcHdfsSymlink extends FileContextSymlinkBaseTest {
   @BeforeClass
   public static void testSetUp() throws Exception {
     Configuration conf = new HdfsConfiguration();
-    conf.setBoolean(DFSConfigKeys.DFS_PERMISSIONS_ENABLED_KEY, true);
+    conf.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true);
     conf.set(FsPermission.UMASK_LABEL, "000");
     cluster = new MiniDFSCluster.Builder(conf).build();
     fc = FileContext.getFileContext(cluster.getURI(0));
+    webhdfs = WebHdfsTestUtil.getWebHdfsFileSystem(conf);
   }

   @AfterClass
@@ -263,4 +265,17 @@ public class TestFcHdfsSymlink extends FileContextSymlinkBaseTest {
     FileStatus statLink = fc.getFileStatus(link);
     assertEquals(statLink.getOwner(), statFile.getOwner());
   }
+
+  @Test
+  /** Test WebHdfsFileSystem.createSymlink(..). */
+  public void testWebHDFS() throws IOException {
+    Path file = new Path(testBaseDir1(), "file");
+    Path link = new Path(testBaseDir1(), "linkToFile");
+    createAndWriteFile(file);
+    webhdfs.createSymlink(file, link, false);
+    fc.setReplication(link, (short)2);
+    assertEquals(0, fc.getFileLinkStatus(link).getReplication());
+    assertEquals(2, fc.getFileStatus(link).getReplication());
+    assertEquals(2, fc.getFileStatus(file).getReplication());
+  }
 }

TestDelegationToken.java

@@ -25,6 +25,7 @@ import java.io.DataInputStream;
 import java.io.IOException;
 import java.net.URI;
 import java.security.PrivilegedExceptionAction;
+import java.util.List;

 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -183,23 +184,44 @@ public class TestDelegationToken {
       }
     });

-    final Token<DelegationTokenIdentifier> token = webhdfs
-        .getDelegationToken("JobTracker");
-    DelegationTokenIdentifier identifier = new DelegationTokenIdentifier();
-    byte[] tokenId = token.getIdentifier();
-    identifier
-        .readFields(new DataInputStream(new ByteArrayInputStream(tokenId)));
-    LOG.info("A valid token should have non-null password, and should be renewed successfully");
-    Assert.assertTrue(null != dtSecretManager.retrievePassword(identifier));
-    dtSecretManager.renewToken(token, "JobTracker");
-    ugi.doAs(new PrivilegedExceptionAction<Object>() {
-      @Override
-      public Object run() throws Exception {
-        token.renew(config);
-        token.cancel(config);
-        return null;
-      }
-    });
+    { //test getDelegationToken(..)
+      final Token<DelegationTokenIdentifier> token = webhdfs
+          .getDelegationToken("JobTracker");
+      DelegationTokenIdentifier identifier = new DelegationTokenIdentifier();
+      byte[] tokenId = token.getIdentifier();
+      identifier.readFields(new DataInputStream(new ByteArrayInputStream(tokenId)));
+      LOG.info("A valid token should have non-null password, and should be renewed successfully");
+      Assert.assertTrue(null != dtSecretManager.retrievePassword(identifier));
+      dtSecretManager.renewToken(token, "JobTracker");
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+        @Override
+        public Void run() throws Exception {
+          token.renew(config);
+          token.cancel(config);
+          return null;
+        }
+      });
+    }
+
+    { //test getDelegationTokens(..)
+      final List<Token<?>> tokenlist = webhdfs.getDelegationTokens("JobTracker");
+      DelegationTokenIdentifier identifier = new DelegationTokenIdentifier();
+      @SuppressWarnings("unchecked")
+      final Token<DelegationTokenIdentifier> token = (Token<DelegationTokenIdentifier>)tokenlist.get(0);
+      byte[] tokenId = token.getIdentifier();
+      identifier.readFields(new DataInputStream(new ByteArrayInputStream(tokenId)));
+      LOG.info("A valid token should have non-null password, and should be renewed successfully");
+      Assert.assertTrue(null != dtSecretManager.retrievePassword(identifier));
+      dtSecretManager.renewToken(token, "JobTracker");
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+        @Override
+        public Void run() throws Exception {
+          token.renew(config);
+          token.cancel(config);
+          return null;
+        }
+      });
+    }
   }

   @SuppressWarnings("deprecation")

WebHDFS.apt.vm (documentation)

@@ -38,8 +38,9 @@ WebHDFS REST API
 * {Introduction}

   The HTTP REST API supports the complete
-  {{{../../api/org/apache/hadoop/fs/FileSystem.html}FileSystem}} interface for HDFS.
-  The operations and the corresponding FileSystem methods are shown in the next section.
+  {{{../../api/org/apache/hadoop/fs/FileSystem.html}FileSystem}}/{{{../../api/org/apache/hadoop/fs/FileContext.html}FileContext}}
+  interface for HDFS.
+  The operations and the corresponding FileSystem/FileContext methods are shown in the next section.
   The Section {{HTTP Query Parameter Dictionary}} specifies the parameter details
   such as the defaults and the valid values.
@@ -68,6 +69,9 @@ WebHDFS REST API
     * {{{Get Delegation Token}<<<GETDELEGATIONTOKEN>>>}}
         (see {{{../../api/org/apache/hadoop/fs/FileSystem.html}FileSystem}}.getDelegationToken)

+    * {{{Get Delegation Tokens}<<<GETDELEGATIONTOKENS>>>}}
+        (see {{{../../api/org/apache/hadoop/fs/FileSystem.html}FileSystem}}.getDelegationTokens)
+
   * HTTP PUT

     * {{{Create and Write to a File}<<<CREATE>>>}}
@@ -76,6 +80,9 @@ WebHDFS REST API
     * {{{Make a Directory}<<<MKDIRS>>>}}
        (see {{{../../api/org/apache/hadoop/fs/FileSystem.html}FileSystem}}.mkdirs)

+    * {{{Create a Symbolic Link}<<<CREATESYMLINK>>>}}
+        (see {{{../../api/org/apache/hadoop/fs/FileContext.html}FileContext}}.createSymlink)
+
     * {{{Rename a File/Directory}<<<RENAME>>>}}
         (see {{{../../api/org/apache/hadoop/fs/FileSystem.html}FileSystem}}.rename)
@@ -129,6 +136,20 @@ WebHDFS REST API
 http://<HOST>:<HTTP_PORT>/webhdfs/v1/<PATH>?op=...
 +---------------------------------

+** {HDFS Configuration Options}
+
+  Below are the HDFS configuration options for WebHDFS.
+
+*-------------------------------------------------+---------------------------------------------------+
+|| Property Name                                   || Description                                      |
+*-------------------------------------------------+---------------------------------------------------+
+| <<<dfs.webhdfs.enabled>>>                        | Enable/disable WebHDFS in Namenodes and Datanodes |
+*-------------------------------------------------+---------------------------------------------------+
+| <<<dfs.web.authentication.kerberos.principal>>>  | The HTTP Kerberos principal used by Hadoop-Auth in the HTTP endpoint. The HTTP Kerberos principal MUST start with 'HTTP/' per the Kerberos HTTP SPNEGO specification. |
+*-------------------------------------------------+---------------------------------------------------+
+| <<<dfs.web.authentication.kerberos.keytab>>>     | The Kerberos keytab file with the credentials for the HTTP Kerberos principal used by Hadoop-Auth in the HTTP endpoint. |
+*-------------------------------------------------+---------------------------------------------------+
+
 * {Authentication}

   When security is <off>, the authenticated user is the username specified in the <<<user.name>>> query parameter.
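Since the test in this commit has to enable WebHDFS explicitly, the first switch in the
table above is evidently off by default in test setups. A minimal sketch of flipping it
programmatically, mirroring the TestFcHdfsSymlink change earlier in this commit (the
MiniDFSCluster usage here is illustrative, not prescribed by the documentation):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.DFSConfigKeys;
    import org.apache.hadoop.hdfs.HdfsConfiguration;
    import org.apache.hadoop.hdfs.MiniDFSCluster;

    public class WebHdfsEnabledCluster {
      public static void main(String[] args) throws Exception {
        // DFS_WEBHDFS_ENABLED_KEY is the constant behind dfs.webhdfs.enabled.
        final Configuration conf = new HdfsConfiguration();
        conf.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true);

        // Start a single-node cluster whose HTTP servers serve /webhdfs/v1.
        final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
        try {
          System.out.println("WebHDFS enabled: "
              + conf.getBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, false));
        } finally {
          cluster.shutdown();
        }
      }
    }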
@@ -339,6 +360,30 @@ Transfer-Encoding: chunked
   {{{../../api/org/apache/hadoop/fs/FileSystem.html}FileSystem}}.mkdirs

+** {Create a Symbolic Link}
+
+  * Submit a HTTP PUT request.
+
++---------------------------------
+curl -i -X PUT "http://<HOST>:<PORT>/webhdfs/v1/<PATH>?op=CREATESYMLINK
+                              &destination=<PATH>[&createParent=<true|false>]"
++---------------------------------
+
+  The client receives a response with zero content length:
+
++---------------------------------
+HTTP/1.1 200 OK
+Content-Length: 0
++---------------------------------
+
+  []
+
+  See also:
+  {{{Destination}<<<destination>>>}},
+  {{{Create Parent}<<<createParent>>>}},
+  {{{../../api/org/apache/hadoop/fs/FileContext.html}FileContext}}.createSymlink
+
 ** {Rename a File/Directory}

   * Submit a HTTP PUT request.
@@ -712,6 +757,41 @@ Transfer-Encoding: chunked
   {{{../../api/org/apache/hadoop/fs/FileSystem.html}FileSystem}}.getDelegationToken

+** {Get Delegation Tokens}
+
+  * Submit a HTTP GET request.
+
++---------------------------------
+curl -i "http://<HOST>:<PORT>/webhdfs/v1/?op=GETDELEGATIONTOKENS&renewer=<USER>"
++---------------------------------
+
+  The client receives a response with a {{{Tokens JSON Schema}<<<Tokens>>> JSON object}}:
+
++---------------------------------
+HTTP/1.1 200 OK
+Content-Type: application/json
+Transfer-Encoding: chunked
+
+{
+  "Tokens":
+  {
+    "Token":
+    [
+      {
+        "urlString":"KAAKSm9i ..."
+      }
+    ]
+  }
+}
++---------------------------------
+
+  []
+
+  See also:
+  {{{Renewer}<<<renewer>>>}},
+  {{{../../api/org/apache/hadoop/fs/FileSystem.html}FileSystem}}.getDelegationTokens
+
 ** {Renew Delegation Token}

   * Submit a HTTP PUT request.
@@ -1209,16 +1289,56 @@ var fileStatusProperties =
   "name"      : "Token",
   "properties":
   {
-    "Token":
-    {
-      "type"      : "object",
-      "properties":
-      {
-        "urlString":
-        {
-          "description": "A delegation token encoded as a URL safe string.",
-          "type"       : "string",
-          "required"   : true
-        }
-      }
-    }
+    "Token": tokenProperties      //See Token Properties
   }
 }
++---------------------------------
+
+  See also:
+  {{{Token Properties}<<<Token>>> Properties}},
+  {{{Get Delegation Token}<<<GETDELEGATIONTOKEN>>>}},
+  the note in {{Delegation}}.
+
+*** {Token Properties}
+
+  JavaScript syntax is used to define <<<tokenProperties>>>
+  so that it can be referred in both <<<Token>>> and <<<Tokens>>> JSON schemas.
+
++---------------------------------
+var tokenProperties =
+{
+  "type"      : "object",
+  "properties":
+  {
+    "urlString":
+    {
+      "description": "A delegation token encoded as a URL safe string.",
+      "type"       : "string",
+      "required"   : true
+    }
+  }
+}
++---------------------------------
+
+** {Tokens JSON Schema}
+
+  A <<<Tokens>>> JSON object represents an array of <<<Token>>> JSON objects.
+
++---------------------------------
+{
+  "name"      : "Tokens",
+  "properties":
+  {
+    "Tokens":
+    {
+      "type"      : "object",
+      "properties":
+      {
+        "Token":
+        {
+          "description": "An array of Token",
+          "type"       : "array",
+          "items"      : tokenProperties      //See Token Properties
+        }
+      }
+    }
+  }
+}
@@ -1227,7 +1347,8 @@ var fileStatusProperties =
 +---------------------------------

   See also:
-  {{{Get Delegation Token}<<<GETDELEGATIONTOKEN>>>}},
+  {{{Token Properties}<<<Token>>> Properties}},
+  {{{Get Delegation Tokens}<<<GETDELEGATIONTOKENS>>>}},
   the note in {{Delegation}}.
@@ -1295,6 +1416,26 @@ var fileStatusProperties =
   {{{Open and Read a File}<<<OPEN>>>}}

+** {Create Parent}
+
+*----------------+-------------------------------------------------------------------+
+|| Name          | <<<createparent>>> |
+*----------------+-------------------------------------------------------------------+
+|| Description   | If the parent directories do not exist, should they be created? |
+*----------------+-------------------------------------------------------------------+
+|| Type          | boolean |
+*----------------+-------------------------------------------------------------------+
+|| Default Value | false |
+*----------------+-------------------------------------------------------------------+
+|| Valid Values  | true | false |
+*----------------+-------------------------------------------------------------------+
+|| Syntax        | true | false |
+*----------------+-------------------------------------------------------------------+
+
+  See also:
+  {{{Create a Symbolic Link}<<<CREATESYMLINK>>>}}
+
 ** {Delegation}

 *----------------+-------------------------------------------------------------------+
@@ -1326,7 +1467,7 @@ var fileStatusProperties =
 *----------------+-------------------------------------------------------------------+
 || Name          | <<<destination>>> |
 *----------------+-------------------------------------------------------------------+
-|| Description   | The destination path used in {{{Rename a File/Directory}<<<RENAME>>>}}. |
+|| Description   | The destination path. |
 *----------------+-------------------------------------------------------------------+
 || Type          | Path |
 *----------------+-------------------------------------------------------------------+
@@ -1338,6 +1479,7 @@ var fileStatusProperties =
 *----------------+-------------------------------------------------------------------+

   See also:
+  {{{Create a Symbolic Link}<<<CREATESYMLINK>>>}},
   {{{Rename a File/Directory}<<<RENAME>>>}}
@@ -1560,7 +1702,8 @@ var fileStatusProperties =
 *----------------+-------------------------------------------------------------------+

   See also:
-  {{{Get Delegation Token}<<<GETDELEGATIONTOKEN>>>}}
+  {{{Get Delegation Token}<<<GETDELEGATIONTOKEN>>>}},
+  {{{Get Delegation Tokens}<<<GETDELEGATIONTOKENS>>>}}

 ** {Replication}