diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index df736a6d1d3..259db34a4be 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -151,6 +151,9 @@ Release 0.23.1 - UNRELEASED
     HDFS-2316. [umbrella] WebHDFS: a complete FileSystem implementation for
     accessing HDFS over HTTP (szetszwo)
 
+    HDFS-2594. Support getDelegationTokens and createSymlink in WebHDFS.
+    (szetszwo)
+
   IMPROVEMENTS
 
     HDFS-2560. Refactor BPOfferService to be a static inner class (todd)
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
index 2af0aad1ce9..9529ecf766b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
@@ -66,6 +66,7 @@ import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
 import org.apache.hadoop.hdfs.web.resources.AccessTimeParam;
 import org.apache.hadoop.hdfs.web.resources.BlockSizeParam;
 import org.apache.hadoop.hdfs.web.resources.BufferSizeParam;
+import org.apache.hadoop.hdfs.web.resources.CreateParentParam;
 import org.apache.hadoop.hdfs.web.resources.DelegationParam;
 import org.apache.hadoop.hdfs.web.resources.DeleteOpParam;
 import org.apache.hadoop.hdfs.web.resources.DestinationParam;
@@ -245,12 +246,14 @@ public class NamenodeWebHdfsMethods {
           final AccessTimeParam accessTime,
       @QueryParam(RenameOptionSetParam.NAME) @DefaultValue(RenameOptionSetParam.DEFAULT)
           final RenameOptionSetParam renameOptions,
+      @QueryParam(CreateParentParam.NAME) @DefaultValue(CreateParentParam.DEFAULT)
+          final CreateParentParam createParent,
       @QueryParam(TokenArgumentParam.NAME) @DefaultValue(TokenArgumentParam.DEFAULT)
           final TokenArgumentParam delegationTokenArgument
       ) throws IOException, InterruptedException {
     return put(ugi, delegation, username, doAsUser, ROOT, op, destination,
         owner, group, permission, overwrite, bufferSize, replication,
-        blockSize, modificationTime, accessTime, renameOptions,
+        blockSize, modificationTime, accessTime, renameOptions, createParent,
         delegationTokenArgument);
   }
 
@@ -292,6 +295,8 @@ public class NamenodeWebHdfsMethods {
           final AccessTimeParam accessTime,
       @QueryParam(RenameOptionSetParam.NAME) @DefaultValue(RenameOptionSetParam.DEFAULT)
           final RenameOptionSetParam renameOptions,
+      @QueryParam(CreateParentParam.NAME) @DefaultValue(CreateParentParam.DEFAULT)
+          final CreateParentParam createParent,
       @QueryParam(TokenArgumentParam.NAME) @DefaultValue(TokenArgumentParam.DEFAULT)
           final TokenArgumentParam delegationTokenArgument
       ) throws IOException, InterruptedException {
@@ -325,6 +330,12 @@ public class NamenodeWebHdfsMethods {
       final String js = JsonUtil.toJsonString("boolean", b);
       return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
     }
+    case CREATESYMLINK:
+    {
+      np.createSymlink(destination.getValue(), fullpath,
+          PermissionParam.getDefaultFsPermission(), createParent.getValue());
+      return Response.ok().type(MediaType.APPLICATION_JSON).build();
+    }
     case RENAME:
     {
       final EnumSet<Options.Rename> s = renameOptions.getValue();
@@ -578,6 +589,17 @@ public class NamenodeWebHdfsMethods {
       final String js = JsonUtil.toJsonString(token);
       return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
     }
+    case GETDELEGATIONTOKENS:
+    {
+      if (delegation.getValue() != null) {
+        throw new IllegalArgumentException(delegation.getName()
+            + " parameter is not null.");
+      }
+      final Token<? extends TokenIdentifier>[] tokens = new Token<?>[1];
+      tokens[0] = generateDelegationToken(namenode, ugi, renewer.getValue());
+      final String js = JsonUtil.toJsonString(tokens);
+      return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
+    }
     case GETHOMEDIRECTORY:
     {
       final String js = JsonUtil.toJsonString(
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
index a1c5d1ebd1c..970f1dc610d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
@@ -97,6 +97,59 @@ public class JsonUtil {
     return (Token<DelegationTokenIdentifier>)toToken(m);
   }
 
+  /** Convert a Token[] to a JSON array. */
+  private static Object[] toJsonArray(final Token<? extends TokenIdentifier>[] array
+      ) throws IOException {
+    if (array == null) {
+      return null;
+    } else if (array.length == 0) {
+      return EMPTY_OBJECT_ARRAY;
+    } else {
+      final Object[] a = new Object[array.length];
+      for(int i = 0; i < array.length; i++) {
+        a[i] = toJsonMap(array[i]);
+      }
+      return a;
+    }
+  }
+
+  /** Convert a Token[] to a JSON string. */
+  public static String toJsonString(final Token<? extends TokenIdentifier>[] tokens
+      ) throws IOException {
+    if (tokens == null) {
+      return null;
+    }
+
+    final Map<String, Object> m = new TreeMap<String, Object>();
+    m.put(Token.class.getSimpleName(), toJsonArray(tokens));
+    return toJsonString(Token.class.getSimpleName() + "s", m);
+  }
+
+  /** Convert an Object[] to a List<Token<?>>. */
+  private static List<Token<?>> toTokenList(final Object[] objects) throws IOException {
+    if (objects == null) {
+      return null;
+    } else if (objects.length == 0) {
+      return Collections.emptyList();
+    } else {
+      final List<Token<?>> list = new ArrayList<Token<?>>(objects.length);
+      for(int i = 0; i < objects.length; i++) {
+        list.add(toToken((Map<?, ?>)objects[i]));
+      }
+      return list;
+    }
+  }
+
+  /** Convert a JSON map to a List<Token<?>>. */
+  public static List<Token<?>> toTokenList(final Map<?, ?> json) throws IOException {
+    if (json == null) {
+      return null;
+    }
+
+    final Map<?, ?> m = (Map<?, ?>)json.get(Token.class.getSimpleName() + "s");
+    return toTokenList((Object[])m.get(Token.class.getSimpleName()));
+  }
+
   /** Convert an exception object to a Json string. */
   public static String toJsonString(final Exception e) {
     final Map<String, Object> m = new TreeMap<String, Object>();
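The new JsonUtil methods above wrap a token array as {"Tokens":{"Token":[{"urlString":...}]}} and read it back into a List<Token<?>>. A minimal round-trip sketch, not part of the patch; the empty placeholder token and the use of the Jetty JSON parser (which WebHDFS uses elsewhere for parsing responses) are assumptions for illustration:

  import java.util.List;
  import java.util.Map;

  import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
  import org.apache.hadoop.hdfs.web.JsonUtil;
  import org.apache.hadoop.security.token.Token;
  import org.apache.hadoop.security.token.TokenIdentifier;
  import org.mortbay.util.ajax.JSON;

  public class TokenJsonRoundTrip {
    public static void main(String[] args) throws Exception {
      // An empty placeholder token is enough to exercise the encoding path.
      @SuppressWarnings("unchecked")
      final Token<? extends TokenIdentifier>[] tokens =
          new Token[] { new Token<DelegationTokenIdentifier>() };

      // Serialize: produces {"Tokens":{"Token":[{"urlString":"..."}]}}.
      final String js = JsonUtil.toJsonString(tokens);

      // Parse it back the same way WebHdfsFileSystem.getDelegationTokens(..) does.
      final Map<?, ?> json = (Map<?, ?>)JSON.parse(js);
      final List<Token<?>> parsed = JsonUtil.toTokenList(json);
      System.out.println(js + " -> " + parsed.size() + " token(s)");
    }
  }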
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
index 7b7f20b12d8..01006d1aeef 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
@@ -29,7 +29,6 @@ import java.net.MalformedURLException;
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
-import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 import java.util.StringTokenizer;
@@ -64,6 +63,7 @@ import org.apache.hadoop.hdfs.server.namenode.SafeModeException;
 import org.apache.hadoop.hdfs.web.resources.AccessTimeParam;
 import org.apache.hadoop.hdfs.web.resources.BlockSizeParam;
 import org.apache.hadoop.hdfs.web.resources.BufferSizeParam;
+import org.apache.hadoop.hdfs.web.resources.CreateParentParam;
 import org.apache.hadoop.hdfs.web.resources.DeleteOpParam;
 import org.apache.hadoop.hdfs.web.resources.DestinationParam;
 import org.apache.hadoop.hdfs.web.resources.GetOpParam;
@@ -318,8 +318,9 @@ public class WebHdfsFileSystem extends FileSystem
         + '&' + new UserParam(ugi)
         + Param.toSortedString("&", parameters);
     final URL url;
-    if (op.equals(PutOpParam.Op.RENEWDELEGATIONTOKEN)
-        || op.equals(GetOpParam.Op.GETDELEGATIONTOKEN)) {
+    if (op == PutOpParam.Op.RENEWDELEGATIONTOKEN
+        || op == GetOpParam.Op.GETDELEGATIONTOKEN
+        || op == GetOpParam.Op.GETDELEGATIONTOKENS) {
       // Skip adding delegation token for getting or renewing delegation token,
       // because these operations require kerberos authentication.
       url = getNamenodeURL(path, query);
@@ -458,6 +459,18 @@ public class WebHdfsFileSystem extends FileSystem
     return (Boolean)json.get("boolean");
   }
 
+  /**
+   * Create a symlink pointing to the destination path.
+   * @see org.apache.hadoop.fs.Hdfs#createSymlink(Path, Path, boolean)
+   */
+  public void createSymlink(Path destination, Path f, boolean createParent
+      ) throws IOException {
+    statistics.incrementWriteOps(1);
+    final HttpOpParam.Op op = PutOpParam.Op.CREATESYMLINK;
+    run(op, f, new DestinationParam(makeQualified(destination).toUri().getPath()),
+        new CreateParentParam(createParent));
+  }
+
   @Override
   public boolean rename(final Path src, final Path dst) throws IOException {
     statistics.incrementWriteOps(1);
@@ -703,8 +716,13 @@ public class WebHdfsFileSystem extends FileSystem
   @Override
   public List<Token<?>> getDelegationTokens(final String renewer
       ) throws IOException {
-    final Token<?>[] t = {getDelegationToken(renewer)};
-    return Arrays.asList(t);
+    final HttpOpParam.Op op = GetOpParam.Op.GETDELEGATIONTOKENS;
+    final Map<?, ?> m = run(op, null, new RenewerParam(renewer));
+    final List<Token<?>> tokens = JsonUtil.toTokenList(m);
+    for(Token<?> t : tokens) {
+      SecurityUtil.setTokenService(t, nnAddr);
+    }
+    return tokens;
   }
 
   @Override
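A client-side usage sketch of the two new WebHdfsFileSystem calls, not part of the patch; the cluster address and paths are made up, and it assumes the webhdfs scheme is mapped to WebHdfsFileSystem in the configuration:

  import java.net.URI;
  import java.util.List;

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.fs.FileSystem;
  import org.apache.hadoop.fs.Path;
  import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
  import org.apache.hadoop.security.token.Token;

  public class WebHdfsSymlinkAndTokens {
    public static void main(String[] args) throws Exception {
      final Configuration conf = new Configuration();
      final WebHdfsFileSystem webhdfs = (WebHdfsFileSystem)FileSystem.get(
          URI.create("webhdfs://namenode:50070"), conf);  // hypothetical address

      // Issues PUT ...?op=CREATESYMLINK&destination=...&createparent=true
      webhdfs.createSymlink(new Path("/user/alice/target"),
          new Path("/user/alice/link"), true);

      // Issues GET ...?op=GETDELEGATIONTOKENS&renewer=JobTracker
      final List<Token<?>> tokens = webhdfs.getDelegationTokens("JobTracker");
      for(Token<?> t : tokens) {
        System.out.println(t.getService() + ": " + t.encodeToUrlString());
      }
    }
  }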
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/CreateParentParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/CreateParentParam.java
new file mode 100644
index 00000000000..81525154be5
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/CreateParentParam.java
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.web.resources;
+
+/** Create Parent parameter. */
+public class CreateParentParam extends BooleanParam {
+  /** Parameter name. */
+  public static final String NAME = "createparent";
+  /** Default parameter value. */
+  public static final String DEFAULT = FALSE;
+
+  private static final Domain DOMAIN = new Domain(NAME);
+
+  /**
+   * Constructor.
+   * @param value the parameter value.
+   */
+  public CreateParentParam(final Boolean value) {
+    super(DOMAIN, value);
+  }
+
+  /**
+   * Constructor.
+   * @param str a string representation of the parameter value.
+   */
+  public CreateParentParam(final String str) {
+    this(DOMAIN.parse(str));
+  }
+
+  @Override
+  public String getName() {
+    return NAME;
+  }
+}
\ No newline at end of file
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/GetOpParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/GetOpParam.java
index 2a418ce8524..34d6e12ecf9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/GetOpParam.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/GetOpParam.java
@@ -32,6 +32,7 @@ public class GetOpParam extends HttpOpParam<GetOpParam.Op> {
 
     GETHOMEDIRECTORY(HttpURLConnection.HTTP_OK),
     GETDELEGATIONTOKEN(HttpURLConnection.HTTP_OK),
+    GETDELEGATIONTOKENS(HttpURLConnection.HTTP_OK),
 
     /** GET_BLOCK_LOCATIONS is a private unstable op. */
     GET_BLOCK_LOCATIONS(HttpURLConnection.HTTP_OK),
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/PermissionParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/PermissionParam.java
index d283423fa0d..b22b9c399c2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/PermissionParam.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/PermissionParam.java
@@ -29,6 +29,11 @@ public class PermissionParam extends ShortParam {
   private static final Domain DOMAIN = new Domain(NAME, 8);
 
   private static final short DEFAULT_PERMISSION = 0755;
+
+  /** @return the default FsPermission. */
+  public static FsPermission getDefaultFsPermission() {
+    return new FsPermission(DEFAULT_PERMISSION);
+  }
 
   /**
    * Constructor.
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/PutOpParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/PutOpParam.java
index 45119a93805..b6fc9198801 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/PutOpParam.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/PutOpParam.java
@@ -26,6 +26,7 @@ public class PutOpParam extends HttpOpParam<PutOpParam.Op> {
     CREATE(true, HttpURLConnection.HTTP_CREATED),
 
     MKDIRS(false, HttpURLConnection.HTTP_OK),
+    CREATESYMLINK(false, HttpURLConnection.HTTP_OK),
     RENAME(false, HttpURLConnection.HTTP_OK),
     SETREPLICATION(false, HttpURLConnection.HTTP_OK),
 
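A small sketch of how the new parameter class renders into the request URI. This is illustrative only, not from the patch; the exact "name=value" output of Param#toString is an assumption based on how Param.toSortedString(..) assembles the query string above:

  import org.apache.hadoop.hdfs.web.resources.CreateParentParam;

  public class CreateParentParamDemo {
    public static void main(String[] args) {
      // Rendered as it would appear in the WebHDFS query string.
      System.out.println(new CreateParentParam(true));                 // createparent=true
      // Parsed from its string form, as the server-side @QueryParam binding does.
      System.out.println(new CreateParentParam("false").getValue());   // false
    }
  }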
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestFcHdfsSymlink.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestFcHdfsSymlink.java
index 3f74789ae96..faa2d71a575 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestFcHdfsSymlink.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestFcHdfsSymlink.java
@@ -17,29 +17,28 @@
  */
 package org.apache.hadoop.fs;
 
-import java.io.*;
+import static org.apache.hadoop.fs.FileContextTestHelper.getAbsoluteTestRootDir;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+import java.io.IOException;
 import java.net.URI;
 
 import org.apache.commons.logging.impl.Log4JLogger;
-import org.apache.log4j.Level;
-
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileContext;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
-import static org.apache.hadoop.fs.FileContextTestHelper.*;
+import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
+import org.apache.hadoop.hdfs.web.WebHdfsTestUtil;
 import org.apache.hadoop.ipc.RemoteException;
-
-import static org.junit.Assert.*;
-import org.junit.Test;
-import org.junit.BeforeClass;
+import org.apache.log4j.Level;
 import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
 
 /**
  * Test symbolic links using FileContext and Hdfs.
@@ -51,6 +50,8 @@ public class TestFcHdfsSymlink extends FileContextSymlinkBaseTest {
   }
 
   private static MiniDFSCluster cluster;
+  private static WebHdfsFileSystem webhdfs;
+
 
   protected String getScheme() {
     return "hdfs";
@@ -79,10 +80,11 @@ public class TestFcHdfsSymlink extends FileContextSymlinkBaseTest {
   @BeforeClass
   public static void testSetUp() throws Exception {
     Configuration conf = new HdfsConfiguration();
-    conf.setBoolean(DFSConfigKeys.DFS_PERMISSIONS_ENABLED_KEY, true);
+    conf.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true);
     conf.set(FsPermission.UMASK_LABEL, "000");
     cluster = new MiniDFSCluster.Builder(conf).build();
     fc = FileContext.getFileContext(cluster.getURI(0));
+    webhdfs = WebHdfsTestUtil.getWebHdfsFileSystem(conf);
   }
 
   @AfterClass
@@ -263,4 +265,17 @@ public class TestFcHdfsSymlink extends FileContextSymlinkBaseTest {
     FileStatus statLink = fc.getFileStatus(link);
     assertEquals(statLink.getOwner(), statFile.getOwner());
   }
+
+  /** Test WebHdfsFileSystem.createSymlink(..). */
+  @Test
+  public void testWebHDFS() throws IOException {
+    Path file = new Path(testBaseDir1(), "file");
+    Path link = new Path(testBaseDir1(), "linkToFile");
+    createAndWriteFile(file);
+    webhdfs.createSymlink(file, link, false);
+    fc.setReplication(link, (short)2);
+    assertEquals(0, fc.getFileLinkStatus(link).getReplication());
+    assertEquals(2, fc.getFileStatus(link).getReplication());
+    assertEquals(2, fc.getFileStatus(file).getReplication());
+  }
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/TestDelegationToken.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/TestDelegationToken.java
index 7808e09b900..4d18e98d1db 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/TestDelegationToken.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/TestDelegationToken.java
@@ -25,6 +25,7 @@ import java.io.DataInputStream;
 import java.io.IOException;
 import java.net.URI;
 import java.security.PrivilegedExceptionAction;
+import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -183,23 +184,44 @@ public class TestDelegationToken {
       }
     });
 
-    final Token<DelegationTokenIdentifier> token = webhdfs
-        .getDelegationToken("JobTracker");
-    DelegationTokenIdentifier identifier = new DelegationTokenIdentifier();
-    byte[] tokenId = token.getIdentifier();
-    identifier
-        .readFields(new DataInputStream(new ByteArrayInputStream(tokenId)));
-    LOG.info("A valid token should have non-null password, and should be renewed successfully");
-    Assert.assertTrue(null != dtSecretManager.retrievePassword(identifier));
-    dtSecretManager.renewToken(token, "JobTracker");
-    ugi.doAs(new PrivilegedExceptionAction<Object>() {
-      @Override
-      public Object run() throws Exception {
-        token.renew(config);
-        token.cancel(config);
-        return null;
-      }
-    });
+    { //test getDelegationToken(..)
+      final Token<DelegationTokenIdentifier> token = webhdfs
+          .getDelegationToken("JobTracker");
+      DelegationTokenIdentifier identifier = new DelegationTokenIdentifier();
+      byte[] tokenId = token.getIdentifier();
+      identifier.readFields(new DataInputStream(new ByteArrayInputStream(tokenId)));
+      LOG.info("A valid token should have non-null password, and should be renewed successfully");
+      Assert.assertTrue(null != dtSecretManager.retrievePassword(identifier));
+      dtSecretManager.renewToken(token, "JobTracker");
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+        @Override
+        public Void run() throws Exception {
+          token.renew(config);
+          token.cancel(config);
+          return null;
+        }
+      });
+    }
+
+    { //test getDelegationTokens(..)
+      final List<Token<?>> tokenlist = webhdfs.getDelegationTokens("JobTracker");
+      DelegationTokenIdentifier identifier = new DelegationTokenIdentifier();
+      @SuppressWarnings("unchecked")
+      final Token<DelegationTokenIdentifier> token =
+          (Token<DelegationTokenIdentifier>)tokenlist.get(0);
+      byte[] tokenId = token.getIdentifier();
+      identifier.readFields(new DataInputStream(new ByteArrayInputStream(tokenId)));
+      LOG.info("A valid token should have non-null password, and should be renewed successfully");
+      Assert.assertTrue(null != dtSecretManager.retrievePassword(identifier));
+      dtSecretManager.renewToken(token, "JobTracker");
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+        @Override
+        public Void run() throws Exception {
+          token.renew(config);
+          token.cancel(config);
+          return null;
+        }
+      });
+    }
   }
 
   @SuppressWarnings("deprecation")
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/WebHDFS.apt.vm b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/WebHDFS.apt.vm
index f8cd404f623..b776e21b0e4 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/WebHDFS.apt.vm
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/WebHDFS.apt.vm
@@ -38,8 +38,9 @@ WebHDFS REST API
 * {Introduction}
 
   The HTTP REST API supports the complete
-  {{{../../api/org/apache/hadoop/fs/FileSystem.html}FileSystem}} interface for HDFS.
-  The operations and the corresponding FileSystem methods are shown in the next section.
+  {{{../../api/org/apache/hadoop/fs/FileSystem.html}FileSystem}}/{{{../../api/org/apache/hadoop/fs/FileContext.html}FileContext}}
+  interface for HDFS.
+  The operations and the corresponding FileSystem/FileContext methods are shown in the next section.
   The Section {{HTTP Query Parameter Dictionary}} specifies the parameter details
   such as the defaults and the valid values.
 
@@ -68,6 +69,9 @@ WebHDFS REST API
     * {{{Get Delegation Token}<<<GETDELEGATIONTOKEN>>>}}
       (see {{{../../api/org/apache/hadoop/fs/FileSystem.html}FileSystem}}.getDelegationToken)
 
+    * {{{Get Delegation Tokens}<<<GETDELEGATIONTOKENS>>>}}
+      (see {{{../../api/org/apache/hadoop/fs/FileSystem.html}FileSystem}}.getDelegationTokens)
+
   * HTTP PUT
 
     * {{{Create and Write to a File}<<<CREATE>>>}}
@@ -76,6 +80,9 @@ WebHDFS REST API
     * {{{Make a Directory}<<<MKDIRS>>>}}
       (see {{{../../api/org/apache/hadoop/fs/FileSystem.html}FileSystem}}.mkdirs)
 
+    * {{{Create a Symbolic Link}<<<CREATESYMLINK>>>}}
+      (see {{{../../api/org/apache/hadoop/fs/FileContext.html}FileContext}}.createSymlink)
+
     * {{{Rename a File/Directory}<<<RENAME>>>}}
       (see {{{../../api/org/apache/hadoop/fs/FileSystem.html}FileSystem}}.rename)
 
@@ -129,6 +136,20 @@ WebHDFS REST API
 http://<HOST>:<HTTP_PORT>/webhdfs/v1/<PATH>?op=...
 +---------------------------------
 
+** {HDFS Configuration Options}
+
+  Below are the HDFS configuration options for WebHDFS.
+
+*-------------------------------------------------+---------------------------------------------------+
+|| Property Name || Description |
+*-------------------------------------------------+---------------------------------------------------+
+| <<<dfs.webhdfs.enabled>>> | Enable/disable WebHDFS in Namenodes and Datanodes |
+*-------------------------------------------------+---------------------------------------------------+
+| <<<dfs.web.authentication.kerberos.principal>>> | The HTTP Kerberos principal used by Hadoop-Auth in the HTTP endpoint. The HTTP Kerberos principal MUST start with 'HTTP/' per Kerberos HTTP SPNEGO specification. |
+*-------------------------------------------------+---------------------------------------------------+
+| <<<dfs.web.authentication.kerberos.keytab>>> | The Kerberos keytab file with the credentials for the HTTP Kerberos principal used by Hadoop-Auth in the HTTP endpoint. |
| +*-------------------------------------------------+---------------------------------------------------+ + * {Authentication} When security is , the authenticated user is the username specified in the <<>> query parameter. @@ -339,6 +360,30 @@ Transfer-Encoding: chunked {{{../../api/org/apache/hadoop/fs/FileSystem.html}FileSystem}}.mkdirs +** {Create a Symbolic Link} + + * Submit a HTTP PUT request. + ++--------------------------------- +curl -i -X PUT "http://:/?op=CREATESYMLINK + &destination=[&createParent=]" ++--------------------------------- + + The client receives a response with zero content length: + ++--------------------------------- +HTTP/1.1 200 OK +Content-Length: 0 ++--------------------------------- + + [] + + See also: + {{{Destination}<<>>}}, + {{{Create Parent}<<>>}}, + {{{../../api/org/apache/hadoop/fs/FileSystem.html}FileSystem}}.createSymlink + + ** {Rename a File/Directory} * Submit a HTTP PUT request. @@ -712,6 +757,41 @@ Transfer-Encoding: chunked {{{../../api/org/apache/hadoop/fs/FileSystem.html}FileSystem}}.getDelegationToken +** {Get Delegation Tokens} + + * Submit a HTTP GET request. + ++--------------------------------- +curl -i "http://:/webhdfs/v1/?op=GETDELEGATIONTOKENS&renewer=" ++--------------------------------- + + The client receives a response with a {{{Tokens JSON Schema}<<>> JSON object}}: + ++--------------------------------- +HTTP/1.1 200 OK +Content-Type: application/json +Transfer-Encoding: chunked + +{ + "Tokens": + { + "Token": + [ + { + "urlString":"KAAKSm9i ..." + } + ] + } +} ++--------------------------------- + + [] + + See also: + {{{Renewer}<<>>}}, + {{{../../api/org/apache/hadoop/fs/FileSystem.html}FileSystem}}.getDelegationTokens + + ** {Renew Delegation Token} * Submit a HTTP PUT request. @@ -1209,16 +1289,56 @@ var fileStatusProperties = "name" : "Token", "properties": { - "Token": + "Token": tokenProperties //See Token Properties + } +} ++--------------------------------- + + See also: + {{{Token Properties}<<>> Properties}}, + {{{Get Delegation Token}<<>>}}, + the note in {{Delegation}}. + +*** {Token Properties} + + JavaScript syntax is used to define <<>> + so that it can be referred in both <<>> and <<>> JSON schemas. + ++--------------------------------- +var tokenProperties = +{ + "type" : "object", + "properties": + { + "urlString": + { + "description": "A delegation token encoded as a URL safe string.", + "type" : "string", + "required" : true + } + } +} ++--------------------------------- + +** {Tokens JSON Schema} + + A <<>> JSON object represents an array of <<>> JSON objects. + ++--------------------------------- +{ + "name" : "Tokens", + "properties": + { + "Tokens": { "type" : "object", "properties": { - "urlString": + "Token": { - "description": "A delegation token encoded as a URL safe string.", - "type" : "string", - "required" : true + "description": "An array of Token", + "type" : "array", + "items" : "Token": tokenProperties //See Token Properties } } } @@ -1227,7 +1347,8 @@ var fileStatusProperties = +--------------------------------- See also: - {{{Get Delegation Token}<<>>}}, + {{{Token Properties}<<>> Properties}}, + {{{Get Delegation Tokens}<<>>}}, the note in {{Delegation}}. 
@@ -1295,6 +1416,26 @@ var fileStatusProperties =
 
   {{{Open and Read a File}<<<OPEN>>>}}
 
 
+** {Create Parent}
+
+*----------------+-------------------------------------------------------------------+
+|| Name | <<<createParent>>> |
+*----------------+-------------------------------------------------------------------+
+|| Description | If the parent directories do not exist, should they be created? |
+*----------------+-------------------------------------------------------------------+
+|| Type | boolean |
+*----------------+-------------------------------------------------------------------+
+|| Default Value | false |
+*----------------+-------------------------------------------------------------------+
+|| Valid Values | true \| false |
+*----------------+-------------------------------------------------------------------+
+|| Syntax | true \| false |
+*----------------+-------------------------------------------------------------------+
+
+  See also:
+  {{{Create a Symbolic Link}<<<CREATESYMLINK>>>}}
+
+
 ** {Delegation}
 
 *----------------+-------------------------------------------------------------------+
@@ -1326,7 +1467,7 @@ var fileStatusProperties =
 *----------------+-------------------------------------------------------------------+
 || Name | <<<destination>>> |
 *----------------+-------------------------------------------------------------------+
-|| Description | The destination path used in {{{Rename a File/Directory}<<<RENAME>>>}}. |
+|| Description | The destination path. |
 *----------------+-------------------------------------------------------------------+
 || Type | Path |
 *----------------+-------------------------------------------------------------------+
@@ -1338,6 +1479,7 @@ var fileStatusProperties =
 *----------------+-------------------------------------------------------------------+
 
   See also:
+  {{{Create a Symbolic Link}<<<CREATESYMLINK>>>}},
   {{{Rename a File/Directory}<<<RENAME>>>}}
 
 
@@ -1560,7 +1702,8 @@ var fileStatusProperties =
 *----------------+-------------------------------------------------------------------+
 
   See also:
-  {{{Get Delegation Token}<<<GETDELEGATIONTOKEN>>>}}
+  {{{Get Delegation Token}<<<GETDELEGATIONTOKEN>>>}},
+  {{{Get Delegation Tokens}<<<GETDELEGATIONTOKENS>>>}}
 
 
 ** {Replication}
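The urlString field documented above is the token's URL-safe encoding. A short sketch, not part of the patch, of turning that string back into a usable Token on the client via Token#decodeFromUrlString (the inverse of the Token#encodeToUrlString call that produces it); the command-line argument is a stand-in for a value taken from the Tokens JSON response:

  import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
  import org.apache.hadoop.security.token.Token;

  public class UrlStringDecode {
    public static void main(String[] args) throws Exception {
      final String urlString = args[0];  // e.g. "KAAKSm9i..." from the Tokens response
      final Token<DelegationTokenIdentifier> token =
          new Token<DelegationTokenIdentifier>();
      token.decodeFromUrlString(urlString);  // inverse of Token#encodeToUrlString()
      System.out.println(token.getKind() + " @ " + token.getService());
    }
  }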