MAPREDUCE-3736. Variable substitution depth too large for fs.default.name causes jobs to fail (ahmed via tucu)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1244264 13f79535-47bb-0310-9956-ffa450edef68
Alejandro Abdelnur 2012-02-14 23:09:05 +00:00
parent 83ab8f087b
commit 89bd2210a7
13 changed files with 24 additions and 14 deletions
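
For context, a minimal sketch (not part of this patch) of the failure mode named in the summary, assuming only org.apache.hadoop.conf.Configuration: ${...} references in property values are expanded when the value is read, up to a fixed depth, and a reference chain or cycle that exceeds that depth fails with an IllegalStateException about the substitution depth. The keys "a" and "b" below are hypothetical.

    import org.apache.hadoop.conf.Configuration;

    public class SubstDepthSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration(false); // skip default resources
        conf.set("a", "${b}");
        conf.set("b", "${a}"); // cycle: a -> b -> a -> ...
        // Expansion happens on get(); the cycle exceeds the depth limit and
        // Configuration throws IllegalStateException ("Variable substitution depth too large").
        conf.get("a");
      }
    }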


@@ -216,6 +216,13 @@
   determine the host, port, etc. for a filesystem.</description>
 </property>
 
+<property>
+  <name>fs.default.name</name>
+  <value>file:///</value>
+  <description>Deprecated. Use (fs.defaultFS) property
+  instead</description>
+</property>
+
 <property>
   <name>fs.trash.interval</name>
   <value>0</value>
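
A hedged sketch of the intended effect of re-adding the deprecated key with an explicit default, assuming core-default.xml (including the block above) is on the classpath; the printed values are the shipped file:/// defaults:

    import org.apache.hadoop.conf.Configuration;

    public class DefaultFsSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();        // loads core-default.xml
        System.out.println(conf.get("fs.defaultFS"));    // file:///
        System.out.println(conf.get("fs.default.name")); // file:/// (deprecated alias)
        // With an explicit value present, a ${fs.default.name} reference elsewhere
        // can resolve in a single expansion step.
      }
    }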


@@ -106,7 +106,10 @@ Release 0.23.2 - UNRELEASED
     MAPREDUCE-3852. Test TestLinuxResourceCalculatorPlugin failing. (Thomas
     Graves via mahadev)
 
+    MAPREDUCE-3736. Variable substitution depth too large for fs.default.name
+    causes jobs to fail (ahmed via tucu).
+
 Release 0.23.1 - 2012-02-08
 
   INCOMPATIBLE CHANGES


@@ -159,7 +159,7 @@ public class TestMRWithDistributedCache extends TestCase {
   public void testLocalJobRunner() throws Exception {
     Configuration c = new Configuration();
     c.set(JTConfig.JT_IPC_ADDRESS, "local");
-    c.set("fs.default.name", "file:///");
+    c.set("fs.defaultFS", "file:///");
     testWithConf(c);
   }


@@ -59,7 +59,7 @@ public class TestNoDefaultsJobConf extends HadoopTestCase {
     JobConf conf = new JobConf(false);
-    conf.set("fs.default.name", createJobConf().get("fs.default.name"));
+    conf.set("fs.defaultFS", createJobConf().get("fs.defaultFS"));
     conf.setJobName("mr");
@@ -100,4 +100,4 @@ public class TestNoDefaultsJobConf extends HadoopTestCase {
   }
 }


@@ -1024,7 +1024,7 @@ public class JHLogAnalyzer {
     if(testFile != null) {
       LOG.info("Start JHLA test ============ ");
       LocalFileSystem lfs = FileSystem.getLocal(conf);
-      conf.set("fs.default.name", "file:///");
+      conf.set("fs.defaultFS", "file:///");
       JHLAMapper map = new JHLAMapper(conf);
       map.parseLogFile(lfs, new Path(testFile), 0L,
                        new LoggingCollector(), Reporter.NULL);


@@ -53,7 +53,7 @@ public class FileBench extends Configured implements Tool {
     "unless they are also explicitly included, as in \"-pln -zip\"\n" +
     "Note that CompressionType params only apply to SequenceFiles\n\n" +
     "Useful options to set:\n" +
-    "-D fs.default.name=\"file:///\" \\\n" +
+    "-D fs.defaultFS=\"file:///\" \\\n" +
     "-D fs.file.impl=org.apache.hadoop.fs.RawLocalFileSystem \\\n" +
     "-D filebench.file.bytes=$((10*1024*1024*1024)) \\\n" +
     "-D filebench.key.words=5 \\\n" +


@@ -41,7 +41,7 @@ public class TestCombineFileInputFormat {
   private static FileSystem localFs = null;
   static {
     try {
-      defaultConf.set("fs.default.name", "file:///");
+      defaultConf.set("fs.defaultFS", "file:///");
       localFs = FileSystem.getLocal(defaultConf);
     } catch (IOException e) {
       throw new RuntimeException("init failure", e);


@@ -69,7 +69,7 @@ public class TestConcatenatedCompressedInput {
   static {
     try {
-      defaultConf.set("fs.default.name", "file:///");
+      defaultConf.set("fs.defaultFS", "file:///");
       localFs = FileSystem.getLocal(defaultConf);
     } catch (IOException e) {
       throw new RuntimeException("init failure", e);


@@ -55,7 +55,7 @@ public class TestTextInputFormat {
   private static FileSystem localFs = null;
   static {
     try {
-      defaultConf.set("fs.default.name", "file:///");
+      defaultConf.set("fs.defaultFS", "file:///");
       localFs = FileSystem.getLocal(defaultConf);
     } catch (IOException e) {
       throw new RuntimeException("init failure", e);


@@ -314,7 +314,7 @@ public class TestMapCollection {
     job.setNumReduceTasks(1);
     job.getConfiguration().set(MRConfig.FRAMEWORK_NAME, MRConfig.LOCAL_FRAMEWORK_NAME);
     job.getConfiguration().setInt(MRJobConfig.IO_SORT_FACTOR, 1000);
-    job.getConfiguration().set("fs.default.name", "file:///");
+    job.getConfiguration().set("fs.defaultFS", "file:///");
     job.getConfiguration().setInt("test.mapcollection.num.maps", 1);
     job.setInputFormatClass(FakeIF.class);
     job.setOutputFormatClass(NullOutputFormat.class);


@@ -45,9 +45,9 @@ public class TestFileInputFormat {
   @Test
   public void testAddInputPath() throws IOException {
     final Configuration conf = new Configuration();
-    conf.set("fs.default.name", "s3://abc:xyz@hostname/");
+    conf.set("fs.defaultFS", "s3://abc:xyz@hostname/");
     final Job j = Job.getInstance(conf);
-    j.getConfiguration().set("fs.default.name", "s3://abc:xyz@hostname/");
+    j.getConfiguration().set("fs.defaultFS", "s3://abc:xyz@hostname/");
     //setup default fs
     final FileSystem defaultfs = FileSystem.get(conf);


@@ -57,7 +57,7 @@ public class TestMRKeyValueTextInputFormat {
   private static FileSystem localFs = null;
   static {
     try {
-      defaultConf.set("fs.default.name", "file:///");
+      defaultConf.set("fs.defaultFS", "file:///");
       localFs = FileSystem.getLocal(defaultConf);
     } catch (IOException e) {
       throw new RuntimeException("init failure", e);


@@ -457,7 +457,7 @@
   <property>
     <name>mapreduce.job.hdfs-servers</name>
-    <value>${fs.default.name}</value>
+    <value>${fs.defaultFS}</value>
   </property>
   <!-- WebAppProxy Configuration-->
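
Finally, a small sketch, again assuming only the Configuration API, of how the ${fs.defaultFS} reference above expands on read; the hdfs://namenode:8020 URI is illustrative:

    import org.apache.hadoop.conf.Configuration;

    public class HdfsServersSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration(false);
        conf.set("fs.defaultFS", "hdfs://namenode:8020");
        conf.set("mapreduce.job.hdfs-servers", "${fs.defaultFS}");
        // A single level of substitution, well under the depth limit.
        System.out.println(conf.get("mapreduce.job.hdfs-servers")); // hdfs://namenode:8020
      }
    }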