MAPREDUCE-3149. Add a test to verify that TokenCache handles file system uri with no authority. Contributed by John George.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1186516 13f79535-47bb-0310-9956-ffa450edef68
commit ba66ca6856
parent 0bd8f0bd40
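For context, a file system URI "with no authority" names a scheme but no host:port component, so resolution falls back to the cluster's configured default file system. A minimal sketch of the distinction using only java.net.URI (illustrative, not part of this commit; the class name and host are hypothetical):

    import java.net.URI;

    public class AuthorityDemo {
        public static void main(String[] args) {
            // Fully qualified: scheme + authority (host:port) + path.
            URI withAuth = URI.create("hdfs://namenode:8020/user/alice/data");
            // Same scheme, no authority: the path resolves against the
            // default file system named in the cluster configuration.
            URI noAuth = URI.create("hdfs:///user/alice/data");

            System.out.println(withAuth.getAuthority()); // prints "namenode:8020"
            System.out.println(noAuth.getAuthority());   // prints "null"
        }
    }

Code that builds a token service name from a path and assumes getAuthority() is non-null can fail on the second form; that is the case the new test exercises.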
@@ -25,6 +25,9 @@ Trunk (unreleased changes)
     MAPREDUCE-3171. normalize nodemanager native code compilation with common/hdfs
     native. (tucu)
 
+    MAPREDUCE-3149. Add a test to verify that TokenCache handles file system
+    uri with no authority. (John George via jitendra)
+
   BUG FIXES
 
     MAPREDUCE-3166. [Rumen] Make Rumen use job history api instead of relying
@@ -63,6 +63,7 @@ import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.hadoop.tools.HadoopArchives;
 import org.apache.hadoop.util.ToolRunner;
 import org.codehaus.jackson.map.ObjectMapper;
 import org.junit.AfterClass;
@@ -428,4 +429,29 @@ public class TestTokenCache {
     assertTrue("didn't find token for [" + lp1 + ", " + lp2 + "]", found);
   }
 
+  @Test
+  public void testGetTokensForUriWithoutAuth() throws IOException {
+    FileSystem fs = dfsCluster.getFileSystem();
+    HadoopArchives har = new HadoopArchives(jConf);
+    Path archivePath = new Path(fs.getHomeDirectory(), "tmp");
+    String[] args = new String[6];
+    args[0] = "-archiveName";
+    args[1] = "foo1.har";
+    args[2] = "-p";
+    args[3] = fs.getHomeDirectory().toString();
+    args[4] = "test";
+    args[5] = archivePath.toString();
+    try {
+      int ret = ToolRunner.run(har, args);
+    } catch (Exception e) {
+      fail("Could not create har file");
+    }
+
+    Path finalPath = new Path(archivePath, "foo1.har");
+    Path filePath = new Path(finalPath, "test");
+
+    Credentials credentials = new Credentials();
+    TokenCache.obtainTokensForNamenodesInternal(
+        credentials, new Path [] {finalPath}, jConf);
+  }
 }
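The test calls the internal obtainTokensForNamenodesInternal variant, which it can reach because TestTokenCache shares TokenCache's package; ordinary clients go through the public TokenCache.obtainTokensForNamenodes. A hedged usage sketch under assumptions not in this commit: a real cluster configuration on the classpath, an existing archive at the (illustrative) path below, and security enabled, since token acquisition is a no-op in insecure deployments:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapreduce.security.TokenCache;
    import org.apache.hadoop.security.Credentials;

    public class ObtainTokensSketch {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            Credentials creds = new Credentials();
            // An authority-less path (no host:port after the scheme);
            // resolving it exercises the case the new test covers.
            Path har = new Path("har:///user/alice/tmp/foo1.har");
            TokenCache.obtainTokensForNamenodes(creds, new Path[] { har }, conf);
            System.out.println("tokens obtained: " + creds.numberOfTokens());
        }
    }

The test builds the archive with HadoopArchives before requesting tokens because opening a har path reads the archive's index files from the underlying file system, so the archive must exist first.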