HADOOP-6994. Api to get delegation token in AbstractFileSystem. Contributed by jitendra.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1091618 13f79535-47bb-0310-9956-ffa450edef68
Jitendra Nath Pandey 2011-04-13 00:42:34 +00:00
parent f5efc187e5
commit d03e5b75bc
8 changed files with 213 additions and 1 deletion


@@ -18,6 +18,8 @@ Trunk (unreleased changes)
HADOOP-7096. Allow setting of end-of-record delimiter for TextInputFormat
(Ahmed Radwan via todd)
HADOOP-6994. Api to get delegation token in AbstractFileSystem. (jitendra)
IMPROVEMENTS
HADOOP-7042. Updates to test-patch.sh to include failed test names and


@@ -25,6 +25,7 @@ import java.net.URI;
import java.net.URISyntaxException;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.StringTokenizer;
@@ -42,6 +43,8 @@ import org.apache.hadoop.fs.Options.Rename;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.fs.InvalidPathException;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.Progressable;
/**
@@ -885,4 +888,40 @@ public abstract class AbstractFileSystem {
*/
public abstract void setVerifyChecksum(final boolean verifyChecksum)
throws AccessControlException, IOException;
/**
* Get a canonical name for this file system.
* @return a URI string that uniquely identifies this file system
*/
public String getCanonicalServiceName() {
return SecurityUtil.buildDTServiceName(getUri(), getUriDefaultPort());
}
/**
* Get one or more delegation tokens associated with the filesystem. Normally
* a file system returns a single delegation token. A file system that manages
* multiple file systems underneath could return a set of delegation tokens for
* all the file systems it manages.
*
* @param renewer the account name that is allowed to renew the token.
* @return List of delegation tokens.
* @throws IOException
*/
@InterfaceAudience.LimitedPrivate( { "HDFS", "MapReduce" })
public List<Token<?>> getDelegationTokens(String renewer) throws IOException {
return null;
}
@Override //Object
public int hashCode() {
return myUri.hashCode();
}
@Override //Object
public boolean equals(Object other) {
if (other == null || !(other instanceof AbstractFileSystem)) {
return false;
}
return myUri.equals(((AbstractFileSystem) other).myUri);
}
}
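
A minimal caller-side sketch (not part of this commit) of how the two new AbstractFileSystem methods might be used together; the class name, method name, and renewer value below are illustrative only, and the base-class getDelegationTokens may return null.

// Illustrative sketch only; not from this commit.
import java.io.IOException;
import java.util.List;

import org.apache.hadoop.fs.AbstractFileSystem;
import org.apache.hadoop.security.token.Token;

public class AfsTokenSketch {
  // Fetch tokens from an already-resolved AbstractFileSystem and print them
  // keyed by the canonical service name built from the URI and default port.
  static void printTokens(AbstractFileSystem afs, String renewer) throws IOException {
    String service = afs.getCanonicalServiceName();
    List<Token<?>> tokens = afs.getDelegationTokens(renewer); // null unless overridden
    if (tokens != null) {
      for (Token<?> t : tokens) {
        System.out.println(service + " -> " + t.getKind());
      }
    }
  }
}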


@@ -22,12 +22,17 @@ import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.EnumSet;
import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.SecretManager.InvalidToken;
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
import org.apache.hadoop.util.Progressable;
/**
@@ -207,4 +212,14 @@ public abstract class DelegateToFileSystem extends AbstractFileSystem {
*/
throw new AssertionError();
}
@Override //AbstractFileSystem
public String getCanonicalServiceName() {
return fsImpl.getCanonicalServiceName();
}
@Override //AbstractFileSystem
public List<Token<?>> getDelegationTokens(String renewer) throws IOException {
return fsImpl.getDelegationTokens(renewer);
}
}


@@ -25,6 +25,7 @@ import java.net.URI;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
@@ -49,6 +50,7 @@ import org.apache.hadoop.ipc.RpcServerException;
import org.apache.hadoop.ipc.UnexpectedServerException;
import org.apache.hadoop.fs.InvalidPathException;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.token.Token;
/**
* The FileContext class provides an interface to the application writer for
@@ -2173,6 +2175,30 @@ public final class FileContext {
}
}.resolve(this, f).getPath();
}
/**
* Returns the set of AbstractFileSystems accessed in the path. The set may
* contain more than one AbstractFileSystem object in the case of symlinks.
*
* @param f
* Path which needs to be resolved
* @return Set of AbstractFileSystems accessed in the path
* @throws IOException
*/
Set<AbstractFileSystem> resolveAbstractFileSystems(final Path f)
throws IOException {
final Path absF = fixRelativePart(f);
final HashSet<AbstractFileSystem> result = new HashSet<AbstractFileSystem>();
new FSLinkResolver<Void>() {
public Void next(final AbstractFileSystem fs, final Path p)
throws IOException, UnresolvedLinkException {
result.add(fs);
fs.getFileStatus(p);
return null;
}
}.resolve(this, absF);
return result;
}
/**
* Class used to perform an operation on and resolve symlinks in a
@@ -2266,4 +2292,25 @@
public static Map<URI, Statistics> getAllStatistics() {
return AbstractFileSystem.getAllStatistics();
}
/**
* Get delegation tokens for the file systems accessed for a given
* path.
* @param p Path for which delegation tokens are requested.
* @param renewer the account name that is allowed to renew the token.
* @return List of delegation tokens.
* @throws IOException
*/
@InterfaceAudience.LimitedPrivate( { "HDFS", "MapReduce" })
public List<Token<?>> getDelegationTokens(
Path p, String renewer) throws IOException {
Set<AbstractFileSystem> afsSet = resolveAbstractFileSystems(p);
List<Token<?>> tokenList =
new ArrayList<Token<?>>();
for (AbstractFileSystem afs : afsSet) {
List<Token<?>> afsTokens = afs.getDelegationTokens(renewer);
tokenList.addAll(afsTokens);
}
return tokenList;
}
}
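
A hypothetical usage sketch (not part of this commit) of the new FileContext#getDelegationTokens(Path, String); the path and renewer strings are placeholders.

// Illustrative sketch only; not from this commit.
import java.io.IOException;
import java.util.List;

import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.token.Token;

public class FileContextTokenSketch {
  public static void main(String[] args) throws IOException {
    FileContext fc = FileContext.getFileContext();
    // Every AbstractFileSystem resolved along the path (including those reached
    // through symlinks) contributes its delegation tokens to the result.
    Path p = new Path("/user/example/data");
    List<Token<?>> tokens = fc.getDelegationTokens(p, "example-renewer");
    System.out.println("Collected " + tokens.size() + " token(s) for " + p);
  }
}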


@@ -50,6 +50,8 @@ import org.apache.hadoop.io.MultipleIOException;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.SecretManager.InvalidToken;
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.util.ReflectionUtils;
@@ -363,9 +365,25 @@ public abstract class FileSystem extends Configured implements Closeable {
* @return a new delegation token
* @throws IOException
*/
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
public Token<?> getDelegationToken(String renewer) throws IOException {
return null;
}
/**
* Get one or more delegation tokens associated with the filesystem. Normally
* a file system returns a single delegation token. A file system that manages
* multiple file systems underneath could return a set of delegation tokens for
* all the file systems it manages.
*
* @param renewer the account name that is allowed to renew the token.
* @return list of new delegation tokens
* @throws IOException
*/
@InterfaceAudience.LimitedPrivate( { "HDFS", "MapReduce" })
public List<Token<?>> getDelegationTokens(String renewer) throws IOException {
return null;
}
/** create a file with the provided permission
* The permission of the file is set to be the provided permission as in
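
A hedged compatibility sketch (not part of this commit): a caller that prefers the new list-returning API and falls back to the older single-token method when an implementation only overrides getDelegationToken. The class and method names are illustrative, not from the Hadoop source.

// Illustrative sketch only; not from this commit.
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.security.token.Token;

public class FsTokenCompatSketch {
  static List<Token<?>> collectTokens(FileSystem fs, String renewer) throws IOException {
    List<Token<?>> result = new ArrayList<Token<?>>();
    List<Token<?>> many = fs.getDelegationTokens(renewer); // null in the base class
    if (many != null) {
      result.addAll(many);
    } else {
      Token<?> one = fs.getDelegationToken(renewer);       // also null unless overridden
      if (one != null) {
        result.add(one);
      }
    }
    return result;
  }
}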


@@ -21,12 +21,13 @@ package org.apache.hadoop.fs;
import java.io.*;
import java.net.URI;
import java.util.EnumSet;
import java.util.Iterator;
import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.Progressable;
/****************************************************************
@@ -366,4 +367,19 @@ public class FilterFileSystem extends FileSystem {
throws IOException {
return fs.primitiveMkdir(f, abdolutePermission);
}
@Override // FileSystem
public String getCanonicalServiceName() {
return fs.getCanonicalServiceName();
}
@Override // FileSystem
public Token<?> getDelegationToken(String renewer) throws IOException {
return fs.getDelegationToken(renewer);
}
@Override // FileSystem
public List<Token<?>> getDelegationTokens(String renewer) throws IOException {
return fs.getDelegationTokens(renewer);
}
}


@@ -21,12 +21,14 @@ import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.EnumSet;
import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.FileSystem.Statistics;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.Progressable;
/**
@@ -261,4 +263,14 @@ public abstract class FilterFs extends AbstractFileSystem {
public Path getLinkTarget(final Path f) throws IOException {
return myFs.getLinkTarget(f);
}
@Override // AbstractFileSystem
public String getCanonicalServiceName() {
return myFs.getCanonicalServiceName();
}
@Override // AbstractFileSystem
public List<Token<?>> getDelegationTokens(String renewer) throws IOException {
return myFs.getDelegationTokens(renewer);
}
}


@@ -0,0 +1,63 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs;
import java.io.IOException;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
/**
* Tests resolution of AbstractFileSystems for a given path with symlinks.
*/
public class TestFileContextResolveAfs {
private static String TEST_ROOT_DIR_LOCAL
= System.getProperty("test.build.data","build/test/data/work-dir/localfs");
private FileContext fc;
private FileSystem localFs;
@Before
public void setup() throws IOException {
fc = FileContext.getFileContext();
}
@Test
public void testFileContextResolveAfs() throws IOException {
Configuration conf = new Configuration();
localFs = FileSystem.get(conf);
Path localPath = new Path(TEST_ROOT_DIR_LOCAL + "/file1");
Path linkPath = new Path("file:///" + TEST_ROOT_DIR_LOCAL + "/file2");
localFs.mkdirs(new Path(TEST_ROOT_DIR_LOCAL));
localFs.create(localPath).close(); // close the stream; the test only needs the file to exist
fc.createSymlink(localPath, linkPath, true);
Set<AbstractFileSystem> afsList = fc.resolveAbstractFileSystems(linkPath);
Assert.assertEquals(1, afsList.size());
localFs.deleteOnExit(localPath);
localFs.deleteOnExit(linkPath);
localFs.close();
}
}